Compare commits
10 Commits
2e89761b0f
..
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 83b6620093 | |||
| a57fef4d44 | |||
| 9384581aee | |||
| 3143dd7263 | |||
| cf78e14709 | |||
| 72f28184ff | |||
| 486aca08a8 | |||
| 1549098eac | |||
| 486bf3b9a4 | |||
| e2753e4eac |
@@ -3,7 +3,8 @@
|
|||||||
# audit-skills.sh - Verify skill completeness and health
|
# audit-skills.sh - Verify skill completeness and health
|
||||||
# Part of LCBP3-DMS Phase 2 improvements
|
# Part of LCBP3-DMS Phase 2 improvements
|
||||||
|
|
||||||
set -euo pipefail
|
set -uo pipefail
|
||||||
|
# Note: no -e — we let per-skill checks accumulate issues without terminating
|
||||||
|
|
||||||
# Colors for output
|
# Colors for output
|
||||||
RED='\033[0;31m'
|
RED='\033[0;31m'
|
||||||
@@ -13,7 +14,7 @@ BLUE='\033[0;34m'
|
|||||||
NC='\033[0m' # No Color
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
# Base directory
|
# Base directory
|
||||||
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)"
|
||||||
AGENTS_DIR="$BASE_DIR/.agents"
|
AGENTS_DIR="$BASE_DIR/.agents"
|
||||||
SKILLS_DIR="$AGENTS_DIR/skills"
|
SKILLS_DIR="$AGENTS_DIR/skills"
|
||||||
|
|
||||||
@@ -56,24 +57,18 @@ check_skill_health() {
|
|||||||
echo -e " ${GREEN} FIELD${NC}: $field"
|
echo -e " ${GREEN} FIELD${NC}: $field"
|
||||||
else
|
else
|
||||||
echo -e " ${RED} MISSING FIELD${NC}: $field"
|
echo -e " ${RED} MISSING FIELD${NC}: $field"
|
||||||
((issues++))
|
((issues++)) || true
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|
||||||
# Check for Role section
|
# Check for LCBP3 context reference (speckit-* skills only)
|
||||||
if grep -q "^## Role$" "$skill_file"; then
|
if [[ "$skill_name" == speckit-* ]]; then
|
||||||
echo -e " ${GREEN} SECTION${NC}: Role"
|
if grep -q '_LCBP3-CONTEXT\.md' "$skill_file"; then
|
||||||
else
|
echo -e " ${GREEN} CONTEXT${NC}: LCBP3 appendix referenced"
|
||||||
echo -e " ${YELLOW} MISSING SECTION${NC}: Role"
|
else
|
||||||
((issues++))
|
echo -e " ${YELLOW} MISSING${NC}: LCBP3 context reference"
|
||||||
fi
|
((issues++)) || true
|
||||||
|
fi
|
||||||
# Check for Task section
|
|
||||||
if grep -q "^## Task$" "$skill_file"; then
|
|
||||||
echo -e " ${GREEN} SECTION${NC}: Task"
|
|
||||||
else
|
|
||||||
echo -e " ${YELLOW} MISSING SECTION${NC}: Task"
|
|
||||||
((issues++))
|
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -84,7 +79,15 @@ check_skill_health() {
|
|||||||
get_skill_version() {
|
get_skill_version() {
|
||||||
local skill_file="$1"
|
local skill_file="$1"
|
||||||
if [[ -f "$skill_file" ]]; then
|
if [[ -f "$skill_file" ]]; then
|
||||||
grep "^version:" "$skill_file" | head -1 | sed 's/version: *//' || echo "unknown"
|
# Match 'version: X.Y.Z' (or quoted) at a LINE START only; ignore nested ` version:` fields.
|
||||||
|
# Output: bare X.Y.Z with no quotes/whitespace.
|
||||||
|
local raw
|
||||||
|
raw=$(grep -E "^version:[[:space:]]*['\"]?[0-9]+\.[0-9]+\.[0-9]+" "$skill_file" | head -1 || true)
|
||||||
|
if [[ -n "$raw" ]]; then
|
||||||
|
printf '%s' "$raw" | sed -E "s/^version:[[:space:]]*['\"]?([0-9]+\.[0-9]+\.[0-9]+).*/\1/"
|
||||||
|
else
|
||||||
|
echo "unknown"
|
||||||
|
fi
|
||||||
else
|
else
|
||||||
echo "no_file"
|
echo "no_file"
|
||||||
fi
|
fi
|
||||||
@@ -114,11 +117,15 @@ SKILL_SUMMARY=()
|
|||||||
|
|
||||||
for skill_dir in "${SKILL_DIRS[@]}"; do
|
for skill_dir in "${SKILL_DIRS[@]}"; do
|
||||||
skill_name="$(basename "$skill_dir")"
|
skill_name="$(basename "$skill_dir")"
|
||||||
|
# Skip non-skill entries (e.g. _LCBP3-CONTEXT.md would not match here; safe)
|
||||||
|
[[ "$skill_name" == _* ]] && continue
|
||||||
echo "Auditing: $skill_name"
|
echo "Auditing: $skill_name"
|
||||||
echo "------------------------"
|
echo "------------------------"
|
||||||
|
|
||||||
|
set +e
|
||||||
check_skill_health "$skill_dir"
|
check_skill_health "$skill_dir"
|
||||||
issues=$?
|
issues=$?
|
||||||
|
set -u
|
||||||
|
|
||||||
skill_version=$(get_skill_version "$skill_dir/SKILL.md")
|
skill_version=$(get_skill_version "$skill_dir/SKILL.md")
|
||||||
SKILL_SUMMARY+=("$skill_name:$issues:$skill_version")
|
SKILL_SUMMARY+=("$skill_name:$issues:$skill_version")
|
||||||
@@ -147,7 +154,7 @@ echo
|
|||||||
# Check skills.md version consistency
|
# Check skills.md version consistency
|
||||||
SKILLS_VERSION_FILE="$SKILLS_DIR/VERSION"
|
SKILLS_VERSION_FILE="$SKILLS_DIR/VERSION"
|
||||||
if [[ -f "$SKILLS_VERSION_FILE" ]]; then
|
if [[ -f "$SKILLS_VERSION_FILE" ]]; then
|
||||||
global_version=$(grep "^version:" "$SKILLS_VERSION_FILE" | sed 's/version: *//')
|
global_version=$(grep "^version:" "$SKILLS_VERSION_FILE" | sed 's/version: *//' | tr -d '\r\n ')
|
||||||
echo "Global skills version: v$global_version"
|
echo "Global skills version: v$global_version"
|
||||||
echo
|
echo
|
||||||
|
|
||||||
|
|||||||
@@ -12,11 +12,11 @@ YELLOW='\033[1;33m'
|
|||||||
NC='\033[0m' # No Color
|
NC='\033[0m' # No Color
|
||||||
|
|
||||||
# Base directory
|
# Base directory
|
||||||
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
BASE_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../.." && pwd)"
|
||||||
AGENTS_DIR="$BASE_DIR/.agents"
|
AGENTS_DIR="$BASE_DIR/.agents"
|
||||||
|
|
||||||
# Expected version (should match LCBP3 version)
|
# Expected version (should match LCBP3 version)
|
||||||
EXPECTED_VERSION="1.8.6"
|
EXPECTED_VERSION="1.8.9"
|
||||||
|
|
||||||
echo "=== .agents Version Validation ==="
|
echo "=== .agents Version Validation ==="
|
||||||
echo "Base directory: $BASE_DIR"
|
echo "Base directory: $BASE_DIR"
|
||||||
@@ -37,10 +37,8 @@ extract_version() {
|
|||||||
|
|
||||||
# Files to check
|
# Files to check
|
||||||
declare -A FILES_TO_CHECK=(
|
declare -A FILES_TO_CHECK=(
|
||||||
["$AGENTS_DIR/README.md"]="Version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
|
|
||||||
["$AGENTS_DIR/skills/VERSION"]="version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
|
["$AGENTS_DIR/skills/VERSION"]="version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
|
||||||
["$AGENTS_DIR/rules/00-project-context.md"]="Version: \([0-9]\+\.[0-9]\+\.[0-9]\+\)"
|
["$AGENTS_DIR/skills/skills.md"]="[Vv]\([0-9]\+\.[0-9]\+\.[0-9]\+\)"
|
||||||
["$AGENTS_DIR/skills/skills.md"]="V\([0-9]\+\.[0-9]\+\.[0-9]\+\)"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Track issues
|
# Track issues
|
||||||
|
|||||||
@@ -2,16 +2,16 @@
|
|||||||
# Part of LCBP3-DMS Phase 2 improvements
|
# Part of LCBP3-DMS Phase 2 improvements
|
||||||
|
|
||||||
param(
|
param(
|
||||||
[string]$BaseDir = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot))
|
[string]$BaseDir = (Split-Path -Parent (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)))
|
||||||
)
|
)
|
||||||
|
|
||||||
# Colors for output
|
# Map to ConsoleColor enum (Write-Host expects enum, not ANSI strings)
|
||||||
$Colors = @{
|
$Colors = @{
|
||||||
Red = "`e[0;31m"
|
Red = 'Red'
|
||||||
Green = "`e[0;32m"
|
Green = 'Green'
|
||||||
Yellow = "`e[1;33m"
|
Yellow = 'Yellow'
|
||||||
Blue = "`e[0;34m"
|
Blue = 'Blue'
|
||||||
NoColor = "`e[0m"
|
NoColor = 'Gray'
|
||||||
}
|
}
|
||||||
|
|
||||||
$AgentsDir = Join-Path $BaseDir ".agents"
|
$AgentsDir = Join-Path $BaseDir ".agents"
|
||||||
@@ -55,9 +55,10 @@ function Test-SkillHealth {
|
|||||||
$content = Get-Content $skillFile -Raw
|
$content = Get-Content $skillFile -Raw
|
||||||
|
|
||||||
# Check for required front matter fields
|
# Check for required front matter fields
|
||||||
$requiredFields = @("name", "description", "version")
|
$requiredFields = @('name', 'description', 'version')
|
||||||
foreach ($field in $requiredFields) {
|
foreach ($field in $requiredFields) {
|
||||||
if ($content -match "^$field:") {
|
$pattern = "(?m)^${field}:"
|
||||||
|
if ($content -match $pattern) {
|
||||||
Write-Host " FIELD: $field" -ForegroundColor $Colors.Green
|
Write-Host " FIELD: $field" -ForegroundColor $Colors.Green
|
||||||
} else {
|
} else {
|
||||||
Write-Host " MISSING FIELD: $field" -ForegroundColor $Colors.Red
|
Write-Host " MISSING FIELD: $field" -ForegroundColor $Colors.Red
|
||||||
@@ -65,20 +66,14 @@ function Test-SkillHealth {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# Check for Role section
|
# Check for LCBP3 context reference (speckit-* skills)
|
||||||
if ($content -match "^## Role$") {
|
if ($skillName -like 'speckit-*') {
|
||||||
Write-Host " SECTION: Role" -ForegroundColor $Colors.Green
|
if ($content -match '_LCBP3-CONTEXT\.md') {
|
||||||
} else {
|
Write-Host " CONTEXT: LCBP3 appendix referenced" -ForegroundColor $Colors.Green
|
||||||
Write-Host " MISSING SECTION: Role" -ForegroundColor $Colors.Yellow
|
} else {
|
||||||
$issues++
|
Write-Host " MISSING: LCBP3 context reference" -ForegroundColor $Colors.Yellow
|
||||||
}
|
$issues++
|
||||||
|
}
|
||||||
# Check for Task section
|
|
||||||
if ($content -match "^## Task$") {
|
|
||||||
Write-Host " SECTION: Task" -ForegroundColor $Colors.Green
|
|
||||||
} else {
|
|
||||||
Write-Host " MISSING SECTION: Task" -ForegroundColor $Colors.Yellow
|
|
||||||
$issues++
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -94,7 +89,7 @@ function Get-SkillVersion {
|
|||||||
if (Test-Path $SkillFile) {
|
if (Test-Path $SkillFile) {
|
||||||
try {
|
try {
|
||||||
$content = Get-Content $SkillFile -Raw
|
$content = Get-Content $SkillFile -Raw
|
||||||
if ($content -match "^version:\s*(.+)") {
|
if ($content -match "(?m)^version:\s*['""]?([0-9]+\.[0-9]+\.[0-9]+)['""]?") {
|
||||||
return $matches[1].Trim()
|
return $matches[1].Trim()
|
||||||
}
|
}
|
||||||
} catch {
|
} catch {
|
||||||
|
|||||||
@@ -2,16 +2,16 @@
|
|||||||
# Part of LCBP3-DMS Phase 2 improvements
|
# Part of LCBP3-DMS Phase 2 improvements
|
||||||
|
|
||||||
param(
|
param(
|
||||||
[string]$BaseDir = (Split-Path -Parent (Split-Path -Parent $PSScriptRoot)),
|
[string]$BaseDir = (Split-Path -Parent (Split-Path -Parent (Split-Path -Parent $PSScriptRoot))),
|
||||||
[string]$ExpectedVersion = "1.8.6"
|
[string]$ExpectedVersion = "1.8.9"
|
||||||
)
|
)
|
||||||
|
|
||||||
# Colors for output
|
# Map to ConsoleColor enum (Write-Host expects enum, not ANSI)
|
||||||
$Colors = @{
|
$Colors = @{
|
||||||
Red = "`e[0;31m"
|
Red = 'Red'
|
||||||
Green = "`e[0;32m"
|
Green = 'Green'
|
||||||
Yellow = "`e[1;33m"
|
Yellow = 'Yellow'
|
||||||
NoColor = "`e[0m"
|
NoColor = 'Gray'
|
||||||
}
|
}
|
||||||
|
|
||||||
$AgentsDir = Join-Path $BaseDir ".agents"
|
$AgentsDir = Join-Path $BaseDir ".agents"
|
||||||
@@ -46,9 +46,7 @@ function Get-VersionFromFile {
|
|||||||
|
|
||||||
# Files to check
|
# Files to check
|
||||||
$FilesToCheck = @{
|
$FilesToCheck = @{
|
||||||
(Join-Path $AgentsDir "README.md") = "Version: ([0-9]+\.[0-9]+\.[0-9]+)"
|
|
||||||
(Join-Path $AgentsDir "skills\VERSION") = "version: ([0-9]+\.[0-9]+\.[0-9]+)"
|
(Join-Path $AgentsDir "skills\VERSION") = "version: ([0-9]+\.[0-9]+\.[0-9]+)"
|
||||||
(Join-Path $AgentsDir "rules\00-project-context.md") = "Version: ([0-9]+\.[0-9]+\.[0-9]+)"
|
|
||||||
(Join-Path $AgentsDir "skills\skills.md") = "V([0-9]+\.[0-9]+\.[0-9]+)"
|
(Join-Path $AgentsDir "skills\skills.md") = "V([0-9]+\.[0-9]+\.[0-9]+)"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,109 @@
|
|||||||
|
# `.agents/skills/` — LCBP3 Agent Skill Pack
|
||||||
|
|
||||||
|
**Version:** 1.8.9 | **Last Updated:** 2026-04-22 | **Total Skills:** 20
|
||||||
|
|
||||||
|
Agent skills for AI-assisted development in **Windsurf IDE** (and compatible agents: Codex CLI, opencode, Amp, Antigravity, AGENTS.md-aware tools).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 📂 Layout
|
||||||
|
|
||||||
|
```
|
||||||
|
.agents/skills/
|
||||||
|
├── VERSION # Single source of truth for skill-pack version
|
||||||
|
├── skills.md # Overview + dependency matrix + health monitoring
|
||||||
|
├── _LCBP3-CONTEXT.md # Shared LCBP3 context injected into every speckit-* skill
|
||||||
|
├── README.md # (this file)
|
||||||
|
├── nestjs-best-practices/ # Backend rules (40 rules across 10 categories)
|
||||||
|
├── next-best-practices/ # Frontend rules (Next.js 15+)
|
||||||
|
└── speckit-*/ # 18 workflow skills (spec → plan → tasks → implement → …)
|
||||||
|
```
|
||||||
|
|
||||||
|
Each skill directory contains:
|
||||||
|
|
||||||
|
- `SKILL.md` — frontmatter (`name`, `description`, `version: 1.8.9`, `scope`, `depends-on`, `handoffs`) + instructions
|
||||||
|
- `templates/` _(optional)_ — artifact templates (spec/plan/tasks/checklist)
|
||||||
|
- `rules/` _(nestjs only)_ — individual rule files grouped by prefix (`arch-`, `security-`, `db-`, etc.)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🚀 How Windsurf Invokes These Skills
|
||||||
|
|
||||||
|
Windsurf exposes two entry points:
|
||||||
|
|
||||||
|
1. **Skill tool** — Windsurf discovers skills by scanning `.agents/skills/*/SKILL.md` frontmatter. Skills marked `user-invocable: false` are used silently by Cascade.
|
||||||
|
2. **Slash commands** — `.windsurf/workflows/*.md` wraps each skill as a slash command (e.g. `/04-speckit.plan`). The workflow file is short; the heavy lifting is delegated to the skill via `skill` tool.
|
||||||
|
|
||||||
|
Both paths end up executing the same `SKILL.md` instructions.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🧭 Typical Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
/01-speckit.constitution → AGENTS.md / product vision
|
||||||
|
/02-speckit.specify → specs/feat-XXX/spec.md
|
||||||
|
/03-speckit.clarify → updates spec.md (up to 5 targeted questions)
|
||||||
|
/04-speckit.plan → specs/feat-XXX/plan.md + data-model.md + contracts/
|
||||||
|
/05-speckit.tasks → specs/feat-XXX/tasks.md
|
||||||
|
/06-speckit.analyze → cross-artifact consistency report (read-only)
|
||||||
|
/07-speckit.implement → executes tasks with Ironclad Protocols (Blast Radius + Strangler + TDD)
|
||||||
|
/08-speckit.checker → pnpm lint / typecheck / markdown-lint
|
||||||
|
/09-speckit.tester → pnpm test + coverage gates (Backend 70%+, Business Logic 80%+)
|
||||||
|
/10-speckit.reviewer → code review with Tier 1/2/3 classification
|
||||||
|
/11-speckit.validate → UAT / acceptance-criteria.md
|
||||||
|
```
|
||||||
|
|
||||||
|
Use `/00-speckit.all` to run specify → clarify → plan → tasks → analyze in one go.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🛠️ Helper Scripts
|
||||||
|
|
||||||
|
From repo root:
|
||||||
|
|
||||||
|
| Script | Purpose |
|
||||||
|
| --- | --- |
|
||||||
|
| `./.agents/scripts/bash/check-prerequisites.sh --json` | Emit `FEATURE_DIR` + `AVAILABLE_DOCS` for a feature branch |
|
||||||
|
| `./.agents/scripts/bash/setup-plan.sh --json` | Emit `FEATURE_SPEC`, `IMPL_PLAN`, `SPECS_DIR`, `BRANCH` |
|
||||||
|
| `./.agents/scripts/bash/update-agent-context.sh windsurf` | Append tech entries to `AGENTS.md` |
|
||||||
|
| `./.agents/scripts/bash/audit-skills.sh` | Validate all `SKILL.md` frontmatter + presence |
|
||||||
|
| `./.agents/scripts/bash/validate-versions.sh` | Version consistency check |
|
||||||
|
| `./.agents/scripts/bash/sync-workflows.sh` | Verify every skill has a `.windsurf/workflows/*.md` wrapper |
|
||||||
|
|
||||||
|
All scripts mirror to `.agents/scripts/powershell/*.ps1` for Windows.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ⚠️ Tier 1 Non-Negotiables (auto-enforced)
|
||||||
|
|
||||||
|
- ADR-019 — `publicId` exposed directly; no `parseInt` / `Number` / `+` on UUID; no `id ?? ''` fallback
|
||||||
|
- ADR-009 — edit SQL schema directly, no TypeORM migrations
|
||||||
|
- ADR-016 — JWT + CASL on every mutation; `Idempotency-Key` required; ClamAV two-phase upload
|
||||||
|
- ADR-018 — AI via DMS API only (Ollama on Admin Desktop; no direct DB/storage)
|
||||||
|
- ADR-007 — layered error classification (Validation / Business / System)
|
||||||
|
- Zero `any`, zero `console.log` (use `Logger`)
|
||||||
|
|
||||||
|
See [`_LCBP3-CONTEXT.md`](./_LCBP3-CONTEXT.md) for the complete list.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🤝 Extending
|
||||||
|
|
||||||
|
To add a new skill:
|
||||||
|
|
||||||
|
1. Create `NAME/SKILL.md` with frontmatter: `name`, `description`, `version: 1.8.9`, `scope`, `depends-on`.
|
||||||
|
2. Append an LCBP3 context reference pointing to `_LCBP3-CONTEXT.md`.
|
||||||
|
3. Wrap with `.windsurf/workflows/NAME.md` so it becomes a slash command.
|
||||||
|
4. Update [`skills.md`](./skills.md) dependency matrix.
|
||||||
|
5. Run `./.agents/scripts/bash/audit-skills.sh` → must pass.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 📚 References
|
||||||
|
|
||||||
|
- **Canonical rules:** `AGENTS.md` (repo root)
|
||||||
|
- **Product vision:** `specs/00-Overview/00-03-product-vision.md`
|
||||||
|
- **ADRs:** `specs/06-Decision-Records/`
|
||||||
|
- **Engineering guidelines:** `specs/05-Engineering-Guidelines/`
|
||||||
|
- **Contributing:** `CONTRIBUTING.md`
|
||||||
+11
-2
@@ -1,10 +1,19 @@
|
|||||||
# Speckit Skills Version
|
# Speckit Skills Version
|
||||||
|
|
||||||
version: 1.8.6
|
version: 1.8.9
|
||||||
release_date: 2026-04-14
|
release_date: 2026-04-22
|
||||||
|
|
||||||
## Changelog
|
## Changelog
|
||||||
|
|
||||||
|
### 1.8.9 (2026-04-22)
|
||||||
|
- Full LCBP3-native rebuild of `.agents/skills/`
|
||||||
|
- Fixed ADR-019 drift (removed `@Expose({ name: 'id' })` and `id ?? ''` fallback patterns)
|
||||||
|
- Replaced all dead references (`GEMINI.md` → `AGENTS.md`, v1.7.0 → v1.8.0 schema, `.specify/memory/` → `AGENTS.md`)
|
||||||
|
- Added real helper scripts under `.agents/scripts/bash/` and `.agents/scripts/powershell/`
|
||||||
|
- Added ADR-007/008/020/021 coverage
|
||||||
|
- New rules: workflow-engine, file-two-phase-upload, ai-boundary, i18n, file-upload, workflow-banner
|
||||||
|
- Standardized frontmatter across all 20 skills (`version: 1.8.9`)
|
||||||
|
|
||||||
### 1.8.6 (2026-04-14)
|
### 1.8.6 (2026-04-14)
|
||||||
- Version alignment with LCBP3-DMS v1.8.6
|
- Version alignment with LCBP3-DMS v1.8.6
|
||||||
- Complete skill implementations for all 20 skills
|
- Complete skill implementations for all 20 skills
|
||||||
|
|||||||
@@ -0,0 +1,91 @@
|
|||||||
|
# 🧭 LCBP3-DMS Context Appendix (Shared)
|
||||||
|
|
||||||
|
> This file is included/referenced by every Speckit skill as the authoritative project context.
|
||||||
|
> Skills **must** load it (or the files it links to) before generating any artifact.
|
||||||
|
|
||||||
|
**Project:** NAP-DMS (LCBP3) — Laem Chabang Port Phase 3 Document Management System
|
||||||
|
**Stack:** NestJS 11 + Next.js 16 + TypeScript + MariaDB 11.8 + Redis + BullMQ + Elasticsearch + Ollama (on-prem AI)
|
||||||
|
**Version:** 1.8.9 (2026-04-18)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 📌 Canonical Rule Sources (read in this order)
|
||||||
|
|
||||||
|
1. **`AGENTS.md`** (repo root) — primary rule file for AI agents; supersedes legacy `GEMINI.md`.
|
||||||
|
2. **`specs/06-Decision-Records/`** — architectural decisions (22 ADRs); ADR priority > Engineering Guidelines.
|
||||||
|
3. **`specs/05-Engineering-Guidelines/`** — backend/frontend/testing/i18n/git patterns.
|
||||||
|
4. **`specs/00-Overview/00-02-glossary.md`** — domain terminology (Correspondence / RFA / Transmittal / Circulation).
|
||||||
|
5. **`specs/00-Overview/00-03-product-vision.md`** — project constitution (Vision, Strategic Pillars, Guardrails).
|
||||||
|
6. **`CONTRIBUTING.md`** — spec writing standards, PR template, review levels.
|
||||||
|
7. **`README.md`** — technology stack + getting started.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🔴 Tier 1 Non-Negotiables
|
||||||
|
|
||||||
|
- **ADR-019 UUID:** `publicId: string` exposed directly — **no** `@Expose({ name: 'id' })` rename; **no** `parseInt`/`Number`/`+` on UUID; **no** `id ?? ''` fallback in frontend.
|
||||||
|
- **ADR-009:** No TypeORM migrations — edit `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` or add a `deltas/*.sql` file.
|
||||||
|
- **ADR-016 Security:** JWT + CASL 4-Level RBAC; `@UseGuards(JwtAuthGuard, CaslAbilityGuard)` on every mutation controller; `ThrottlerGuard` on auth; bcrypt 12 rounds; `Idempotency-Key` required on POST/PUT/PATCH.
|
||||||
|
- **ADR-002 Document Numbering:** Redis Redlock + TypeORM `@VersionColumn` (double-lock). Never use application-side counter alone.
|
||||||
|
- **ADR-008 Notifications:** BullMQ queue — never inline email/notification in a request thread.
|
||||||
|
- **ADR-018 AI Boundary:** Ollama on Admin Desktop only; AI → DMS API → DB (never direct DB/storage). Human-in-the-loop validation required.
|
||||||
|
- **ADR-007 Error Handling:** Layered (Validation / Business / System); `BusinessException` hierarchy; user-friendly `userMessage` + `recoveryAction`; technical stack only in logs.
|
||||||
|
- **TypeScript Strict:** Zero `any`, zero `console.log` (use NestJS `Logger`).
|
||||||
|
- **i18n:** No hardcoded Thai/English strings in components — use i18n keys (see `05-08-i18n-guidelines.md`).
|
||||||
|
- **File Upload:** Two-phase (Temp → ClamAV → Permanent), whitelist `PDF/DWG/DOCX/XLSX/ZIP`, max 50MB, `StorageService` only.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🏷️ Domain Glossary (reject generic terms)
|
||||||
|
|
||||||
|
| ✅ Use | ❌ Don't Use |
|
||||||
|
| --- | --- |
|
||||||
|
| Correspondence | Letter, Communication, Document |
|
||||||
|
| RFA | Approval Request, Submit for Approval |
|
||||||
|
| Transmittal | Delivery Note, Cover Letter |
|
||||||
|
| Circulation | Distribution, Routing |
|
||||||
|
| Shop Drawing | Construction Drawing |
|
||||||
|
| Contract Drawing | Design Drawing, Blueprint |
|
||||||
|
| Workflow Engine | Approval Flow, Process Engine |
|
||||||
|
| Document Numbering | Document ID, Auto Number |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 📁 Key Files for Generating / Validating Artifacts
|
||||||
|
|
||||||
|
| When you need... | Read |
|
||||||
|
| --- | --- |
|
||||||
|
| A new feature spec | `.agents/skills/speckit-specify/templates/spec-template.md` + `specs/01-Requirements/01-06-edge-cases-and-rules.md` |
|
||||||
|
| A plan | `.agents/skills/speckit-plan/templates/plan-template.md` + relevant ADRs |
|
||||||
|
| Task breakdown | `.agents/skills/speckit-tasks/templates/tasks-template.md` + existing patterns in `specs/08-Tasks/` |
|
||||||
|
| Acceptance criteria / UAT | `specs/01-Requirements/01-05-acceptance-criteria.md` |
|
||||||
|
| Schema / table definition | `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql` + `03-01-data-dictionary.md` |
|
||||||
|
| RBAC / permissions | `specs/03-Data-and-Storage/lcbp3-v1.8.0-seed-permissions.sql` + `01-02-01-rbac-matrix.md` |
|
||||||
|
| Release / hotfix | `specs/04-Infrastructure-OPS/04-08-release-management-policy.md` |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🛠️ Helper Scripts (real paths in this repo)
|
||||||
|
|
||||||
|
- `./.agents/scripts/bash/check-prerequisites.sh` / `powershell/*.ps1`
|
||||||
|
- `./.agents/scripts/bash/setup-plan.sh`
|
||||||
|
- `./.agents/scripts/bash/update-agent-context.sh windsurf`
|
||||||
|
- `./.agents/scripts/bash/audit-skills.sh`
|
||||||
|
- `./.agents/scripts/bash/validate-versions.sh`
|
||||||
|
- `./.agents/scripts/bash/sync-workflows.sh`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ✅ Commit Checklist (applied automatically by speckit-implement)
|
||||||
|
|
||||||
|
- [ ] UUID pattern verified (no `parseInt` / `Number` / `+` on UUID, no `id ?? ''` fallback)
|
||||||
|
- [ ] No `any`, no `console.log` in committed code
|
||||||
|
- [ ] Business comments in Thai, code identifiers in English
|
||||||
|
- [ ] Schema changes via SQL directly (not migration)
|
||||||
|
- [ ] Test coverage meets targets (Backend 70%+, Business Logic 80%+)
|
||||||
|
- [ ] Relevant ADRs referenced (007/008/009/016/018/019/020/021)
|
||||||
|
- [ ] Domain glossary terms used correctly
|
||||||
|
- [ ] Error handling: `Logger` + `HttpException` / `BusinessException`
|
||||||
|
- [ ] i18n keys used (no hardcode text)
|
||||||
|
- [ ] Cache invalidation when data mutated
|
||||||
|
- [ ] OWASP Top 10 review passed
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,12 @@
|
|||||||
---
|
---
|
||||||
name: nestjs-best-practices
|
name: nestjs-best-practices
|
||||||
description: NestJS best practices and architecture patterns for building production-ready applications. This skill should be used when writing, reviewing, or refactoring NestJS code to ensure proper patterns for modules, dependency injection, security, and performance.
|
description: NestJS best practices and architecture patterns for building production-ready LCBP3-DMS backend code. Enforces ADR-009 (no TypeORM migrations), ADR-019 (hybrid UUID), ADR-016 (security), ADR-007 (error handling), ADR-008 (BullMQ), ADR-001/002 (workflow + numbering), ADR-018/020 (AI boundary), and ADR-021 (workflow context).
|
||||||
|
version: 1.8.9
|
||||||
|
scope: backend
|
||||||
|
user-invocable: false
|
||||||
license: MIT
|
license: MIT
|
||||||
metadata:
|
metadata:
|
||||||
author: Kadajett
|
upstream: 'Kadajett/nestjs-best-practices v1.1.0 (forked + LCBP3-aligned)'
|
||||||
version: '1.1.0'
|
|
||||||
---
|
---
|
||||||
|
|
||||||
# NestJS Best Practices
|
# NestJS Best Practices
|
||||||
@@ -110,6 +112,13 @@ Reference these guidelines when:
|
|||||||
- `devops-use-logging` - Structured logging
|
- `devops-use-logging` - Structured logging
|
||||||
- `devops-graceful-shutdown` - Zero-downtime deployments
|
- `devops-graceful-shutdown` - Zero-downtime deployments
|
||||||
|
|
||||||
|
### 11. LCBP3-Specific (CRITICAL — Project Overrides)
|
||||||
|
|
||||||
|
- `db-no-typeorm-migrations` — **CRITICAL** ADR-009: edit SQL directly
|
||||||
|
- `lcbp3-workflow-engine` — **CRITICAL** ADR-001/002/021: DSL state machine + double-lock numbering + workflow context
|
||||||
|
- `security-file-two-phase-upload` — **CRITICAL** ADR-016: Upload → Temp → ClamAV → Commit
|
||||||
|
- `lcbp3-ai-boundary` — **CRITICAL** ADR-018/020: Ollama on-prem only, human-in-the-loop
|
||||||
|
|
||||||
## NAP-DMS Project-Specific Rules (MUST FOLLOW)
|
## NAP-DMS Project-Specific Rules (MUST FOLLOW)
|
||||||
|
|
||||||
These rules override general NestJS best practices for the NAP-DMS project:
|
These rules override general NestJS best practices for the NAP-DMS project:
|
||||||
@@ -120,21 +129,62 @@ These rules override general NestJS best practices for the NAP-DMS project:
|
|||||||
- แก้ไข schema โดยตรงที่: `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql`
|
- แก้ไข schema โดยตรงที่: `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql`
|
||||||
- ใช้ n8n workflow สำหรับ data migration ถ้าจำเป็น
|
- ใช้ n8n workflow สำหรับ data migration ถ้าจำเป็น
|
||||||
|
|
||||||
### ADR-019: Hybrid Identifier Strategy (CRITICAL)
|
### ADR-019: Hybrid Identifier Strategy (CRITICAL — March 2026 Pattern)
|
||||||
|
|
||||||
|
> **Updated pattern:** `UuidBaseEntity` exposes `publicId` **directly**. ห้ามใช้ `@Expose({ name: 'id' })` — API จะคืน `publicId` เป็น field name ตรงๆ.
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
|
// ✅ CORRECT — ใช้ UuidBaseEntity
|
||||||
@Entity()
|
@Entity()
|
||||||
export class Project {
|
export class Project extends UuidBaseEntity {
|
||||||
@PrimaryGeneratedColumn()
|
// publicId (string UUIDv7) + id (INT, @Exclude) สืบทอดจาก UuidBaseEntity
|
||||||
@Exclude() // ห้ามส่งออกทาง API
|
// API response → { publicId: "019505a1-7c3e-7000-8000-abc123..." }
|
||||||
id: number; // INT AUTO_INCREMENT - internal only
|
|
||||||
|
|
||||||
@Column({ type: 'uuid' })
|
@Column()
|
||||||
@Expose({ name: 'id' }) // ส่งออกเป็น 'id' ทาง API
|
projectCode: string;
|
||||||
publicId: string; // UUIDv7 - public API identifier
|
|
||||||
|
@Column()
|
||||||
|
projectName: string;
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ WRONG — pattern เก่า ห้ามใช้
|
||||||
|
@Entity()
|
||||||
|
export class OldProject {
|
||||||
|
@PrimaryGeneratedColumn()
|
||||||
|
@Exclude()
|
||||||
|
id: number;
|
||||||
|
|
||||||
|
@Column({ type: 'uuid' })
|
||||||
|
@Expose({ name: 'id' }) // ❌ อย่า rename publicId เป็น 'id'
|
||||||
|
publicId: string;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**DTO Input (รับ UUID จาก Frontend):**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export class CreateContractDto {
|
||||||
|
@IsUUID('7')
|
||||||
|
projectUuid: string; // รับ UUID string จาก client
|
||||||
|
}
|
||||||
|
|
||||||
|
// Controller resolves UUID → INT internally
|
||||||
|
@Post()
|
||||||
|
async create(@Body() dto: CreateContractDto) {
|
||||||
|
const projectId = await this.projectService.resolveInternalId(dto.projectUuid);
|
||||||
|
return this.contractService.create({ ...dto, projectId });
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**ห้ามเด็ดขาด (CI Blocker):**
|
||||||
|
|
||||||
|
- ❌ `parseInt(projectPublicId)` — "019505…" → 19 (silently wrong)
|
||||||
|
- ❌ `Number(publicId)` / `+publicId` — NaN
|
||||||
|
- ❌ `@Expose({ name: 'id' })` บน `publicId` (pattern เก่า)
|
||||||
|
- ❌ Expose INT `id` ใน API response (ต้อง `@Exclude()` เสมอ)
|
||||||
|
|
||||||
### Two-Phase File Upload
|
### Two-Phase File Upload
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
|
|||||||
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"version": "1.8.9",
|
||||||
|
"organization": "**NAP-DMS / LCBP3** — Laem Chabang Port Phase 3 Document Management System",
|
||||||
|
"date": "2026-04-22",
|
||||||
|
"abstract": "Comprehensive NestJS best-practices guide compiled for the LCBP3-DMS backend. Contains 40+ rules across 11 categories (10 general + 1 project-specific), prioritized by impact. Forked from Kadajett/nestjs-best-practices (v1.1.0) and aligned to LCBP3 ADRs: ADR-001 (workflow engine), ADR-002 (document numbering), ADR-007 (error handling), ADR-008 (notifications/BullMQ), ADR-009 (no TypeORM migrations), ADR-016 (security), ADR-018/020 (AI boundary), ADR-019 (hybrid UUID identifier — March 2026 pattern), and ADR-021 (workflow context).\n\nThis document is the single, consolidated reference used by Cascade and other AI coding agents when writing, reviewing, or refactoring backend code in this repository. All LCBP3-specific overrides live in section 11.",
|
||||||
|
"references": [
|
||||||
|
"[AGENTS.md (root)](../../../AGENTS.md) — canonical AI agent rules",
|
||||||
|
"[CONTRIBUTING.md](../../../CONTRIBUTING.md) — spec authoring + PR process",
|
||||||
|
"[ADR-001 Unified Workflow Engine](../../../specs/06-Decision-Records/ADR-001-unified-workflow-engine.md)",
|
||||||
|
"[ADR-002 Document Numbering Strategy](../../../specs/06-Decision-Records/ADR-002-document-numbering-strategy.md)",
|
||||||
|
"[ADR-007 Error Handling Strategy](../../../specs/06-Decision-Records/ADR-007-error-handling-strategy.md)",
|
||||||
|
"[ADR-008 Email/Notification Strategy](../../../specs/06-Decision-Records/ADR-008-email-notification-strategy.md)",
|
||||||
|
"[ADR-009 Database Migration Strategy](../../../specs/06-Decision-Records/ADR-009-database-migration-strategy.md)",
|
||||||
|
"[ADR-016 Security & Authentication](../../../specs/06-Decision-Records/ADR-016-security-authentication.md)",
|
||||||
|
"[ADR-018 AI Boundary](../../../specs/06-Decision-Records/ADR-018-ai-boundary.md)",
|
||||||
|
"[ADR-019 Hybrid Identifier Strategy](../../../specs/06-Decision-Records/ADR-019-hybrid-identifier-strategy.md)",
|
||||||
|
"[ADR-020 AI Intelligence Integration](../../../specs/06-Decision-Records/ADR-020-ai-intelligence-integration.md)",
|
||||||
|
"[ADR-021 Workflow Context](../../../specs/06-Decision-Records/ADR-021-workflow-context.md)",
|
||||||
|
"[Backend Engineering Guidelines](../../../specs/05-Engineering-Guidelines/05-02-backend-guidelines.md)",
|
||||||
|
"[Schema — v1.8.0 Tables](../../../specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql)",
|
||||||
|
"[Data Dictionary](../../../specs/03-Data-and-Storage/03-01-data-dictionary.md)",
|
||||||
|
"Upstream: [Kadajett/nestjs-best-practices](https://github.com/Kadajett/nestjs-best-practices) v1.1.0"
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -5,20 +5,22 @@ impactDescription: Use INT PK internally + UUID for public API per project ADR-0
|
|||||||
tags: database, uuid, identifier, adr-019, api-design, typeorm
|
tags: database, uuid, identifier, adr-019, api-design, typeorm
|
||||||
---
|
---
|
||||||
|
|
||||||
## Hybrid Identifier Strategy (ADR-019)
|
## Hybrid Identifier Strategy (ADR-019) — March 2026 Pattern
|
||||||
|
|
||||||
**This project follows ADR-019: INT Primary Key (internal) + UUIDv7 (public API)**
|
**This project follows ADR-019: INT Primary Key (internal) + UUIDv7 (public API)**
|
||||||
|
|
||||||
Unlike standard practices that use UUID as the primary key, this project uses a **hybrid approach** optimized for MariaDB performance and API consistency.
|
Unlike standard practices that use UUID as the primary key, this project uses a **hybrid approach** optimized for MariaDB performance and API consistency.
|
||||||
|
|
||||||
|
> **Updated pattern (March 2026):** Entities extend `UuidBaseEntity`. The `publicId` column is exposed **directly** in API responses — ห้ามใช้ `@Expose({ name: 'id' })` เพื่อ rename.
|
||||||
|
|
||||||
### The Strategy
|
### The Strategy
|
||||||
|
|
||||||
| Layer | Field | Type | Usage |
|
| Layer | Field | Type | Usage |
|
||||||
|-------|-------|------|-------|
|
| --------------- | ---------- | ----------------------------------- | ------------------------------------------------- |
|
||||||
| **Database PK** | `id` | `INT AUTO_INCREMENT` | Internal foreign keys only |
|
| **Database PK** | `id` | `INT AUTO_INCREMENT` | Internal foreign keys only (marked `@Exclude()`) |
|
||||||
| **Public API** | `uuid` | `MariaDB UUID` (native) | External references, URLs |
|
| **Public API** | `publicId` | `MariaDB UUID` (native, BINARY(16)) | External references, URLs — exposed as-is |
|
||||||
| **DTO Input** | `xxxUuid` | `string` | Accept UUID in create/update |
|
| **DTO Input** | `xxxUuid` | `string` (UUIDv7) | Accept UUID in create/update DTOs |
|
||||||
| **DTO Output** | `id` | `string` | API returns UUID as `id` via `@Expose` |
|
| **DTO Output** | `publicId` | `string` (UUIDv7) | API returns `publicId` field directly (no rename) |
|
||||||
|
|
||||||
### Why Hybrid IDs?
|
### Why Hybrid IDs?
|
||||||
|
|
||||||
@@ -27,31 +29,51 @@ Unlike standard practices that use UUID as the primary key, this project uses a
|
|||||||
- **Compatibility**: UUID works well with distributed systems and external integrations
|
- **Compatibility**: UUID works well with distributed systems and external integrations
|
||||||
- **MariaDB Native**: Uses MariaDB's native UUID type (stored as BINARY(16), auto-converts to string)
|
- **MariaDB Native**: Uses MariaDB's native UUID type (stored as BINARY(16), auto-converts to string)
|
||||||
|
|
||||||
### Entity Definition
|
### Entity Definition (Current Pattern)
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { Entity, PrimaryGeneratedColumn, Column, Index } from 'typeorm';
|
import { Entity, Column } from 'typeorm';
|
||||||
import { Exclude, Expose } from 'class-transformer';
|
import { UuidBaseEntity } from '@/common/entities/uuid-base.entity';
|
||||||
|
|
||||||
@Entity('contracts')
|
@Entity('contracts')
|
||||||
export class Contract {
|
export class Contract extends UuidBaseEntity {
|
||||||
@PrimaryGeneratedColumn()
|
// publicId (string UUIDv7) + id (INT, @Exclude) สืบทอดจาก UuidBaseEntity
|
||||||
@Exclude() // Never expose in API response
|
// API response → { publicId: "019505a1-7c3e-7000-8000-abc123...", contractCode: ..., ... }
|
||||||
id: number; // Internal INT PK - used for FK relationships
|
|
||||||
|
|
||||||
@Column({ type: 'uuid', unique: true })
|
|
||||||
@Expose({ name: 'id' }) // Exposed as 'id' in API
|
|
||||||
uuid: string; // Public UUIDv7 - what API consumers see
|
|
||||||
|
|
||||||
@Column()
|
@Column()
|
||||||
contractCode: string;
|
contractCode: string;
|
||||||
|
|
||||||
@Column()
|
@Column()
|
||||||
contractName: string;
|
contractName: string;
|
||||||
|
|
||||||
|
@Column({ name: 'project_id' })
|
||||||
|
projectId: number; // INT FK — internal, not exposed if marked @Exclude in UuidBaseEntity
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### DTO Pattern (Accept UUID, Resolve to INT)
|
**`UuidBaseEntity` (shared base):**
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { PrimaryGeneratedColumn, Column, CreateDateColumn, UpdateDateColumn } from 'typeorm';
|
||||||
|
import { Exclude } from 'class-transformer';
|
||||||
|
|
||||||
|
export abstract class UuidBaseEntity {
|
||||||
|
@PrimaryGeneratedColumn()
|
||||||
|
@Exclude() // ❗ CRITICAL: INT id must never leak to API
|
||||||
|
id: number;
|
||||||
|
|
||||||
|
@Column({ type: 'uuid', unique: true, generated: 'uuid' })
|
||||||
|
publicId: string; // UUIDv7, exposed as-is
|
||||||
|
|
||||||
|
@CreateDateColumn()
|
||||||
|
createdAt: Date;
|
||||||
|
|
||||||
|
@UpdateDateColumn()
|
||||||
|
updatedAt: Date;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### DTO Pattern (Accept UUID, Resolve to INT Internally)
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// dto/create-contract.dto.ts
|
// dto/create-contract.dto.ts
|
||||||
@@ -59,8 +81,8 @@ import { IsUUID, IsNotEmpty } from 'class-validator';
|
|||||||
|
|
||||||
export class CreateContractDto {
|
export class CreateContractDto {
|
||||||
@IsNotEmpty()
|
@IsNotEmpty()
|
||||||
@IsUUID('4')
|
@IsUUID('7') // UUIDv7 (MariaDB native)
|
||||||
projectUuid: string; // Accept UUID from client
|
projectUuid: string; // Accept UUID from client
|
||||||
|
|
||||||
@IsNotEmpty()
|
@IsNotEmpty()
|
||||||
contractCode: string;
|
contractCode: string;
|
||||||
@@ -69,48 +91,38 @@ export class CreateContractDto {
|
|||||||
contractName: string;
|
contractName: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// dto/contract-response.dto.ts
|
// ❌ NO Response DTO with @Expose rename needed.
|
||||||
import { Exclude, Expose } from 'class-transformer';
|
// Entity class_transformer via TransformInterceptor will serialize publicId directly.
|
||||||
|
|
||||||
export class ContractResponseDto {
|
|
||||||
@Expose({ name: 'id' })
|
|
||||||
uuid: string; // Returned as 'id' field in JSON
|
|
||||||
|
|
||||||
contractCode: string;
|
|
||||||
contractName: string;
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Service/Controller Pattern
|
### Service/Controller Pattern
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
@Controller('contracts')
|
@Controller('contracts')
|
||||||
|
@UseGuards(JwtAuthGuard, CaslAbilityGuard)
|
||||||
export class ContractsController {
|
export class ContractsController {
|
||||||
constructor(
|
constructor(
|
||||||
private contractsService: ContractsService,
|
private contractsService: ContractsService,
|
||||||
private uuidResolver: UuidResolver, // Helper to convert UUID → INT
|
private uuidResolver: UuidResolver
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
@Post()
|
@Post()
|
||||||
async create(@Body() dto: CreateContractDto) {
|
async create(@Body() dto: CreateContractDto) {
|
||||||
// Resolve UUID to INT PK for database operations
|
// Resolve UUID → INT PK for FK relationship
|
||||||
const projectId = await this.uuidResolver.resolveProject(dto.projectUuid);
|
const projectId = await this.uuidResolver.resolveProject(dto.projectUuid);
|
||||||
|
|
||||||
// Create with INT FK
|
|
||||||
const contract = await this.contractsService.create({
|
const contract = await this.contractsService.create({
|
||||||
...dto,
|
...dto,
|
||||||
projectId, // INT for database
|
projectId,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Response automatically transforms via @Expose
|
// Response: TransformInterceptor + @Exclude on id → publicId exposed directly
|
||||||
return contract;
|
return contract;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Get(':id')
|
@Get(':publicId')
|
||||||
async findOne(@Param('id') uuid: string) {
|
async findOne(@Param('publicId', ParseUuidPipe) publicId: string) {
|
||||||
// Controller receives UUID string
|
return this.contractsService.findOneByPublicId(publicId);
|
||||||
// Service handles UUID → INT resolution internally
|
|
||||||
return this.contractsService.findByUuid(uuid);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@@ -124,21 +136,21 @@ export class UuidResolver {
|
|||||||
@InjectRepository(Project)
|
@InjectRepository(Project)
|
||||||
private projectRepo: Repository<Project>,
|
private projectRepo: Repository<Project>,
|
||||||
@InjectRepository(Contract)
|
@InjectRepository(Contract)
|
||||||
private contractRepo: Repository<Contract>,
|
private contractRepo: Repository<Contract>
|
||||||
) {}
|
) {}
|
||||||
|
|
||||||
async resolveProject(uuid: string): Promise<number> {
|
async resolveProject(publicId: string): Promise<number> {
|
||||||
const project = await this.projectRepo.findOne({
|
const project = await this.projectRepo.findOne({
|
||||||
where: { uuid },
|
where: { publicId },
|
||||||
select: ['id'], // Only fetch INT PK
|
select: ['id'], // Only INT PK for FK
|
||||||
});
|
});
|
||||||
if (!project) throw new NotFoundException('Project not found');
|
if (!project) throw new NotFoundException('Project not found');
|
||||||
return project.id;
|
return project.id;
|
||||||
}
|
}
|
||||||
|
|
||||||
async resolveContract(uuid: string): Promise<number> {
|
async resolveContract(publicId: string): Promise<number> {
|
||||||
const contract = await this.contractRepo.findOne({
|
const contract = await this.contractRepo.findOne({
|
||||||
where: { uuid },
|
where: { publicId },
|
||||||
select: ['id'],
|
select: ['id'],
|
||||||
});
|
});
|
||||||
if (!contract) throw new NotFoundException('Contract not found');
|
if (!contract) throw new NotFoundException('Contract not found');
|
||||||
@@ -147,20 +159,20 @@ export class UuidResolver {
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
### TransformInterceptor (Required)
|
### TransformInterceptor (Required — register ONCE)
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// Must be configured globally to handle @Exclude/@Expose
|
// Register via APP_INTERCEPTOR in CommonModule — ห้ามซ้ำใน main.ts
|
||||||
@Injectable()
|
@Injectable()
|
||||||
export class TransformInterceptor implements NestInterceptor {
|
export class TransformInterceptor implements NestInterceptor {
|
||||||
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
|
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
|
||||||
return next.handle().pipe(
|
return next.handle().pipe(
|
||||||
map((data) => instanceToPlain(data)), // Applies class-transformer decorators
|
map((data) => instanceToPlain(data)) // Applies @Exclude / @Expose
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// app.module.ts
|
// common.module.ts
|
||||||
@Module({
|
@Module({
|
||||||
providers: [
|
providers: [
|
||||||
{
|
{
|
||||||
@@ -169,40 +181,42 @@ export class TransformInterceptor implements NestInterceptor {
|
|||||||
},
|
},
|
||||||
],
|
],
|
||||||
})
|
})
|
||||||
export class AppModule {}
|
export class CommonModule {}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
> **Warning:** ห้ามเรียก `app.useGlobalInterceptors(new TransformInterceptor())` ใน `main.ts` ซ้ำ — จะทำให้ response double-wrap `{ data: { data: ... } }`.
|
||||||
|
|
||||||
### Critical: NEVER ParseInt on UUID
|
### Critical: NEVER ParseInt on UUID
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// ❌ WRONG - parseInt on UUID gives garbage value
|
// ❌ WRONG - parseInt on UUID gives garbage value
|
||||||
const id = parseInt(projectUuid); // "0195a1b2-..." → 195 (wrong!)
|
const id = parseInt(projectPublicId); // "0195a1b2-..." → 195 (wrong!)
|
||||||
|
|
||||||
// ❌ WRONG - Number() on UUID
|
// ❌ WRONG - Number() on UUID
|
||||||
const id = Number(projectUuid); // NaN
|
const id = Number(projectPublicId); // NaN
|
||||||
|
|
||||||
// ❌ WRONG - Unary plus on UUID
|
// ❌ WRONG - Unary plus on UUID
|
||||||
const id = +projectUuid; // NaN
|
const id = +projectPublicId; // NaN
|
||||||
|
|
||||||
// ✅ CORRECT - Resolve via database lookup
|
// ✅ CORRECT - Resolve via database lookup
|
||||||
const projectId = await uuidResolver.resolveProject(projectUuid);
|
const projectId = await uuidResolver.resolveProject(projectPublicId);
|
||||||
|
|
||||||
// ✅ CORRECT - Use TypeORM find with UUID column
|
// ✅ CORRECT - Use TypeORM find with publicId column
|
||||||
const project = await projectRepo.findOne({ where: { uuid: projectUuid } });
|
const project = await projectRepo.findOne({ where: { publicId: projectPublicId } });
|
||||||
const id = project.id; // Get INT PK from entity
|
const id = project.id; // Get INT PK from entity
|
||||||
```
|
```
|
||||||
|
|
||||||
### Query with UUID (No Resolution Needed)
|
### Query with publicId (No Resolution Needed)
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
// Direct UUID lookup in TypeORM
|
// Direct UUID lookup in TypeORM
|
||||||
const project = await this.projectRepo.findOne({
|
const project = await this.projectRepo.findOne({
|
||||||
where: { uuid: projectUuid }, // Query by UUID column
|
where: { publicId: projectPublicId },
|
||||||
});
|
});
|
||||||
|
|
||||||
// Relations use INT FK internally
|
// Relations use INT FK internally
|
||||||
const contracts = await this.contractRepo.find({
|
const contracts = await this.contractRepo.find({
|
||||||
where: { projectId: project.id }, // INT for FK query
|
where: { projectId: project.id }, // INT for FK query
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,100 @@
|
|||||||
|
---
|
||||||
|
title: No TypeORM Migrations (ADR-009)
|
||||||
|
impact: CRITICAL
|
||||||
|
impactDescription: Edit SQL schema files directly; n8n handles data migration. Do not generate TypeORM migration files.
|
||||||
|
tags: database, schema, migration, adr-009, sql, n8n
|
||||||
|
---
|
||||||
|
|
||||||
|
## No TypeORM Migrations (ADR-009)
|
||||||
|
|
||||||
|
**This project does NOT use TypeORM migration files.**
|
||||||
|
|
||||||
|
All schema changes must be made **directly** in the canonical SQL file:
|
||||||
|
|
||||||
|
- `specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql`
|
||||||
|
|
||||||
|
Delta scripts (for incremental rollout to existing environments) go under:
|
||||||
|
|
||||||
|
- `specs/03-Data-and-Storage/deltas/YYYY-MM-DD-descriptive-name.sql`
|
||||||
|
|
||||||
|
Data migration (e.g., backfilling a new column) is handled by **n8n workflows**, not TypeORM's `QueryRunner`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Why No Migrations?
|
||||||
|
|
||||||
|
1. **Single source of truth** — The full SQL schema is always readable as one file. No need to replay a migration chain to understand current state.
|
||||||
|
2. **Review friendly** — Schema diff = git diff on the SQL file. Reviewers see the complete picture.
|
||||||
|
3. **Ops alignment** — DBAs and operators work in SQL, not TypeScript.
|
||||||
|
4. **n8n for data** — Business-meaningful data transforms live in n8n where they can be versioned, retried, and orchestrated with monitoring.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ✅ Workflow for a Schema Change
|
||||||
|
|
||||||
|
1. **Update Data Dictionary** first:
|
||||||
|
- `specs/03-Data-and-Storage/03-01-data-dictionary.md` — add field meaning + business rules.
|
||||||
|
2. **Update the canonical schema**:
|
||||||
|
- Edit `lcbp3-v1.8.0-schema-02-tables.sql` — add/alter column, constraint, index.
|
||||||
|
3. **Add a delta script** (if deploying to existing env):
|
||||||
|
- `specs/03-Data-and-Storage/deltas/2026-04-22-add-rfa-revision-column.sql`
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Delta: Add revision column to rfa table
|
||||||
|
ALTER TABLE rfa
|
||||||
|
ADD COLUMN revision INT NOT NULL DEFAULT 1 AFTER status;
|
||||||
|
|
||||||
|
CREATE INDEX idx_rfa_revision ON rfa(revision);
|
||||||
|
```
|
||||||
|
4. **Update the Entity** (`backend/src/.../entities/rfa.entity.ts`):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Column({ type: 'int', default: 1 })
|
||||||
|
revision: number;
|
||||||
|
```
|
||||||
|
5. **If data backfill needed** → create n8n workflow, not TypeScript migration.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ❌ Forbidden
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# ❌ DO NOT generate migrations
|
||||||
|
pnpm typeorm migration:generate ./src/migrations/AddRevision
|
||||||
|
|
||||||
|
# ❌ DO NOT run migrations
|
||||||
|
pnpm typeorm migration:run
|
||||||
|
```
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ DO NOT write migration classes
|
||||||
|
export class AddRevision1730000000000 implements MigrationInterface {
|
||||||
|
async up(queryRunner: QueryRunner): Promise<void> { /* ... */ }
|
||||||
|
async down(queryRunner: QueryRunner): Promise<void> { /* ... */ }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ✅ TypeORM Config (runtime only)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ormconfig.ts
|
||||||
|
export default {
|
||||||
|
type: 'mariadb',
|
||||||
|
// ...
|
||||||
|
synchronize: false, // ❗ NEVER true (would auto-sync entity ↔ schema)
|
||||||
|
migrationsRun: false, // ❗ NEVER true
|
||||||
|
// ❌ Do NOT specify `migrations:` entries
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
`synchronize: false` is mandatory because the canonical SQL file is authoritative — TypeORM should never mutate the schema.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- [ADR-009 Database Migration Strategy](../../../../specs/06-Decision-Records/ADR-009-database-migration-strategy.md)
|
||||||
|
- [Data Dictionary](../../../../specs/03-Data-and-Storage/03-01-data-dictionary.md)
|
||||||
|
- [Schema Tables](../../../../specs/03-Data-and-Storage/lcbp3-v1.8.0-schema-02-tables.sql)
|
||||||
@@ -0,0 +1,157 @@
|
|||||||
|
---
|
||||||
|
title: AI Integration Boundary (ADR-018 / ADR-020)
|
||||||
|
impact: CRITICAL
|
||||||
|
impactDescription: AI runs on Admin Desktop only; AI → DMS API → DB (never direct); human-in-the-loop validation mandatory; full audit trail.
|
||||||
|
tags: ai, ollama, boundary, adr-018, adr-020, privacy, audit
|
||||||
|
---
|
||||||
|
|
||||||
|
## AI Integration Boundary
|
||||||
|
|
||||||
|
LCBP3 uses **on-premises AI only** (Ollama on Admin Desktop) with strict isolation from data layers.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## The Boundary
|
||||||
|
|
||||||
|
```
|
||||||
|
┌────────────────────────────────────────────────────────────┐
|
||||||
|
│ User Browser (Next.js) │
|
||||||
|
└─────────────────────────┬──────────────────────────────────┘
|
||||||
|
│ (authenticated HTTPS)
|
||||||
|
┌─────────────────────────▼──────────────────────────────────┐
|
||||||
|
│ DMS API (NestJS) ◀── enforces CASL, validation, audit │
|
||||||
|
│ ├─ AiGateway (proxies to Ollama) │
|
||||||
|
│ └─ DB + Storage (Elasticsearch, MariaDB, File System) │
|
||||||
|
└─────────────────────────┬──────────────────────────────────┘
|
||||||
|
│ (HTTP → Admin Desktop, internal)
|
||||||
|
┌─────────────────────────▼──────────────────────────────────┐
|
||||||
|
│ Admin Desktop (Desk-5439) │
|
||||||
|
│ ├─ Ollama (Gemma 4) │
|
||||||
|
│ ├─ PaddleOCR (Thai + English) │
|
||||||
|
│ └─ n8n orchestration │
|
||||||
|
└────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**❗ Admin Desktop has NO network access to MariaDB, no SMB to storage, no shared secrets.** It receives base64-encoded file bytes over HTTPS and returns extracted text + suggestions.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Required Patterns
|
||||||
|
|
||||||
|
### 1. AiGateway Module (backend)
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Module({
|
||||||
|
controllers: [AiController],
|
||||||
|
providers: [AiService, AiGateway, AiAuditLogger],
|
||||||
|
exports: [AiService],
|
||||||
|
})
|
||||||
|
export class AiModule {}
|
||||||
|
|
||||||
|
@Injectable()
|
||||||
|
export class AiService {
|
||||||
|
async extractMetadata(fileId: number, user: User): Promise<ExtractedMetadata> {
|
||||||
|
// 1. Authorize (CASL: user can read this file)
|
||||||
|
await this.ability.ensureCan(user, 'read', File, fileId);
|
||||||
|
|
||||||
|
// 2. Load file (DMS API, inside the boundary)
|
||||||
|
const fileBytes = await this.storageService.read(fileId);
|
||||||
|
|
||||||
|
// 3. Call Admin Desktop AI over HTTP
|
||||||
|
const raw = await this.aiGateway.extract(fileBytes);
|
||||||
|
|
||||||
|
// 4. Validate AI output schema (Zod)
|
||||||
|
const parsed = ExtractedMetadataSchema.parse(raw);
|
||||||
|
|
||||||
|
// 5. Audit log (who, what, when, model, confidence)
|
||||||
|
await this.auditLogger.log({
|
||||||
|
userId: user.id,
|
||||||
|
action: 'ai.extract_metadata',
|
||||||
|
fileId,
|
||||||
|
model: raw.model,
|
||||||
|
confidence: parsed.confidence,
|
||||||
|
});
|
||||||
|
|
||||||
|
// 6. Return — frontend MUST render for human confirmation
|
||||||
|
return parsed;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Human-in-the-Loop
|
||||||
|
|
||||||
|
AI output is **never persisted directly**. Users must confirm via `DocumentReviewForm`:
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
<DocumentReviewForm
|
||||||
|
document={doc}
|
||||||
|
aiSuggestions={suggestions}
|
||||||
|
onConfirm={(reviewed) => saveMetadata(reviewed)} // user edits applied
|
||||||
|
/>
|
||||||
|
```
|
||||||
|
|
||||||
|
The `user_confirmed_at` timestamp and diff (AI suggestion → final value) are stored in the audit log.
|
||||||
|
|
||||||
|
### 3. Rate Limiting
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Post('ai/extract')
|
||||||
|
@UseGuards(JwtAuthGuard, CaslAbilityGuard, ThrottlerGuard)
|
||||||
|
@Throttle({ default: { limit: 10, ttl: 60_000 } }) // 10 req/min/user
|
||||||
|
async extract(@Body() dto: ExtractDto) { /* ... */ }
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ❌ Forbidden
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ AI container connecting to DB
|
||||||
|
// docker-compose.yml inside ai-service:
|
||||||
|
// environment:
|
||||||
|
// DATABASE_URL: mysql://... ← NEVER
|
||||||
|
|
||||||
|
// ❌ AI SDK calling cloud API
|
||||||
|
import OpenAI from 'openai'; // ❌ No cloud AI SDKs in production code
|
||||||
|
const client = new OpenAI({ apiKey: ... });
|
||||||
|
|
||||||
|
// ❌ Persisting AI output without human confirm
|
||||||
|
async extractAndSave(fileId: number) {
|
||||||
|
const metadata = await this.ai.extract(fileId);
|
||||||
|
await this.repo.save({ fileId, ...metadata }); // ❌ skips human review
|
||||||
|
}
|
||||||
|
|
||||||
|
// ❌ Skipping audit log
|
||||||
|
const result = await this.aiGateway.extract(bytes); // no logging
|
||||||
|
return result;
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Audit Log Schema
|
||||||
|
|
||||||
|
```sql
|
||||||
|
CREATE TABLE ai_audit_log (
|
||||||
|
id INT AUTO_INCREMENT PRIMARY KEY,
|
||||||
|
public_id UUID UNIQUE NOT NULL,
|
||||||
|
user_id INT NOT NULL,
|
||||||
|
action VARCHAR(64) NOT NULL, -- 'ai.extract_metadata', 'ai.classify', etc.
|
||||||
|
file_id INT,
|
||||||
|
model VARCHAR(64), -- 'gemma-4:7b', 'paddleocr-v3'
|
||||||
|
confidence DECIMAL(4,3),
|
||||||
|
input_hash CHAR(64), -- SHA-256 of input for replay detection
|
||||||
|
output_summary JSON,
|
||||||
|
human_confirmed_at DATETIME,
|
||||||
|
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
|
||||||
|
INDEX idx_user_created (user_id, created_at),
|
||||||
|
INDEX idx_file (file_id)
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- [ADR-018 AI Boundary](../../../../specs/06-Decision-Records/ADR-018-ai-boundary.md)
|
||||||
|
- [ADR-020 AI Intelligence Integration](../../../../specs/06-Decision-Records/ADR-020-ai-intelligence-integration.md)
|
||||||
|
- [ADR-017 Ollama Data Migration](../../../../specs/06-Decision-Records/ADR-017-ollama-data-migration.md)
|
||||||
@@ -0,0 +1,181 @@
|
|||||||
|
---
|
||||||
|
title: Workflow Engine + Document Numbering + Workflow Context (ADR-001 / 002 / 021)
|
||||||
|
impact: CRITICAL
|
||||||
|
impactDescription: DSL-based state machine; double-lock numbering; integrated workflow context exposed to clients.
|
||||||
|
tags: workflow, numbering, redlock, version-column, adr-001, adr-002, adr-021
|
||||||
|
---
|
||||||
|
|
||||||
|
## Workflow Engine + Numbering + Context
|
||||||
|
|
||||||
|
LCBP3 uses a **unified workflow engine** (DSL-based state machine) across RFA, Transmittal, Correspondence, Circulation, and Shop Drawing. Every state transition goes through the same engine — no per-type routing tables.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ADR-001: Unified Workflow Engine
|
||||||
|
|
||||||
|
### State Transition Pattern
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Injectable()
|
||||||
|
export class WorkflowEngine {
|
||||||
|
async transition(
|
||||||
|
instanceId: string,
|
||||||
|
action: WorkflowAction,
|
||||||
|
actor: User,
|
||||||
|
context?: WorkflowContext,
|
||||||
|
): Promise<WorkflowInstance> {
|
||||||
|
// 1. Load current state from DB (never trust client-provided state)
|
||||||
|
const instance = await this.repo.findOneByPublicId(instanceId);
|
||||||
|
if (!instance) throw new NotFoundException();
|
||||||
|
|
||||||
|
// 2. Validate transition against DSL
|
||||||
|
const dsl = await this.dslService.load(instance.workflowTypeId);
|
||||||
|
const nextState = dsl.resolve(instance.currentState, action);
|
||||||
|
if (!nextState) {
|
||||||
|
throw new BusinessException(
|
||||||
|
`Action ${action} not allowed from state ${instance.currentState}`,
|
||||||
|
'ไม่สามารถดำเนินการนี้ได้ในสถานะปัจจุบัน',
|
||||||
|
'กรุณาตรวจสอบขั้นตอนการอนุมัติ',
|
||||||
|
'WF_INVALID_TRANSITION',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Apply transition atomically (optimistic lock via @VersionColumn)
|
||||||
|
instance.currentState = nextState;
|
||||||
|
await this.repo.save(instance); // throws OptimisticLockVersionMismatchError on race
|
||||||
|
|
||||||
|
// 4. Emit event for listeners (notifications via BullMQ — ADR-008)
|
||||||
|
this.eventBus.publish(new WorkflowTransitionedEvent(instance, action, actor));
|
||||||
|
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### ❌ Anti-Patterns
|
||||||
|
|
||||||
|
- ❌ Hard-coded `switch (state)` in controllers/services
|
||||||
|
- ❌ Trusting `currentState` from request body
|
||||||
|
- ❌ Creating separate routing tables per document type
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ADR-002: Document Numbering (Double-Lock)
|
||||||
|
|
||||||
|
Concurrent requests for a new document number **must** use both:
|
||||||
|
|
||||||
|
1. **Redis Redlock** — distributed lock across app instances
|
||||||
|
2. **TypeORM `@VersionColumn`** — optimistic lock on counter row
|
||||||
|
|
||||||
|
### Counter Entity
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Entity('document_number_counters')
|
||||||
|
@Unique(['projectId', 'documentTypeId'])
|
||||||
|
export class DocumentNumberCounter extends UuidBaseEntity {
|
||||||
|
@Column({ name: 'project_id' })
|
||||||
|
projectId: number;
|
||||||
|
|
||||||
|
@Column({ name: 'document_type_id' })
|
||||||
|
documentTypeId: number;
|
||||||
|
|
||||||
|
@Column({ name: 'last_number', default: 0 })
|
||||||
|
lastNumber: number;
|
||||||
|
|
||||||
|
@VersionColumn()
|
||||||
|
version: number; // ❗ Optimistic lock — do not rename, do not remove
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Service Pattern
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Injectable()
|
||||||
|
export class DocumentNumberingService {
|
||||||
|
constructor(
|
||||||
|
@InjectRepository(DocumentNumberCounter)
|
||||||
|
private counterRepo: Repository<DocumentNumberCounter>,
|
||||||
|
private redlock: RedlockService,
|
||||||
|
private readonly logger: Logger,
|
||||||
|
) {}
|
||||||
|
|
||||||
|
async generateNext(ctx: NumberingContext): Promise<string> {
|
||||||
|
const lockKey = `doc_num:${ctx.projectId}:${ctx.documentTypeId}`;
|
||||||
|
|
||||||
|
// Distributed lock — 3s TTL, up to 5 retries
|
||||||
|
const lock = await this.redlock.acquire([lockKey], 3000);
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Optimistic lock via @VersionColumn
|
||||||
|
const counter = await this.counterRepo.findOne({
|
||||||
|
where: { projectId: ctx.projectId, documentTypeId: ctx.documentTypeId },
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!counter) {
|
||||||
|
throw new NotFoundException('Counter not initialized for this project/type');
|
||||||
|
}
|
||||||
|
|
||||||
|
counter.lastNumber += 1;
|
||||||
|
await this.counterRepo.save(counter); // may throw OptimisticLockVersionMismatchError
|
||||||
|
|
||||||
|
return this.formatNumber(ctx, counter.lastNumber);
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof OptimisticLockVersionMismatchError) {
|
||||||
|
this.logger.warn(`Numbering race detected for ${lockKey}, retrying`);
|
||||||
|
// Let caller retry via BullMQ retry policy
|
||||||
|
}
|
||||||
|
throw err;
|
||||||
|
} finally {
|
||||||
|
await lock.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private formatNumber(ctx: NumberingContext, seq: number): string {
|
||||||
|
// e.g. "LCBP3-RFA-0042"
|
||||||
|
return `${ctx.projectCode}-${ctx.typeCode}-${String(seq).padStart(4, '0')}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### ❌ Anti-Patterns
|
||||||
|
|
||||||
|
- ❌ App-side counter only (`let counter = 0; counter++`)
|
||||||
|
- ❌ Using `findOne` + `update` without `@VersionColumn`
|
||||||
|
- ❌ Using only Redis lock without DB optimistic lock (race if Redis fails)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ADR-021: Integrated Workflow Context
|
||||||
|
|
||||||
|
Every workflow-aware API response **must** expose:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export class WorkflowEnvelope<T> {
|
||||||
|
data: T;
|
||||||
|
|
||||||
|
workflow: {
|
||||||
|
instancePublicId: string;
|
||||||
|
currentState: string; // e.g. 'pending_review'
|
||||||
|
availableActions: string[]; // e.g. ['approve', 'reject', 'request-revision']
|
||||||
|
canEdit: boolean; // computed from CASL + current state
|
||||||
|
lastTransitionAt: string; // ISO 8601
|
||||||
|
};
|
||||||
|
|
||||||
|
stepAttachments?: Array<{ // files produced by the current/previous step
|
||||||
|
publicId: string;
|
||||||
|
fileName: string;
|
||||||
|
stepCode: string;
|
||||||
|
downloadUrl: string;
|
||||||
|
}>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Frontend uses `workflow.availableActions` to render buttons — no client-side state machine logic.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- [ADR-001 Unified Workflow Engine](../../../../specs/06-Decision-Records/ADR-001-unified-workflow-engine.md)
|
||||||
|
- [ADR-002 Document Numbering Strategy](../../../../specs/06-Decision-Records/ADR-002-document-numbering-strategy.md)
|
||||||
|
- [ADR-021 Workflow Context](../../../../specs/06-Decision-Records/ADR-021-workflow-context.md)
|
||||||
@@ -0,0 +1,137 @@
|
|||||||
|
---
|
||||||
|
title: Two-Phase File Upload + ClamAV (ADR-016)
|
||||||
|
impact: CRITICAL
|
||||||
|
impactDescription: Upload → Temp → ClamAV scan → Commit → Permanent. Whitelist + 50MB cap. StorageService only.
|
||||||
|
tags: file-upload, clamav, security, adr-016, storage
|
||||||
|
---
|
||||||
|
|
||||||
|
## Two-Phase File Upload (ADR-016)
|
||||||
|
|
||||||
|
**Never write uploaded files directly to permanent storage.** All uploads must go through:
|
||||||
|
|
||||||
|
```
|
||||||
|
Client → Upload endpoint → Temp storage → ClamAV scan → Commit endpoint → Permanent storage
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Constraints (non-negotiable)
|
||||||
|
|
||||||
|
| Rule | Value |
|
||||||
|
| --- | --- |
|
||||||
|
| Allowed MIME types | `application/pdf`, `image/vnd.dwg`, `application/vnd.openxmlformats-officedocument.wordprocessingml.document`, `application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`, `application/zip` |
|
||||||
|
| Allowed extensions | `.pdf`, `.dwg`, `.docx`, `.xlsx`, `.zip` |
|
||||||
|
| Max size | 50 MB |
|
||||||
|
| Temp TTL | 24 h (purged by cron) |
|
||||||
|
| Virus scan | ClamAV (blocking) |
|
||||||
|
| Mover | `StorageService` only — never `fs.rename` directly from controller |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 1: Upload to Temp
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
@Post('upload')
|
||||||
|
@UseGuards(JwtAuthGuard, ThrottlerGuard)
|
||||||
|
@UseInterceptors(FileInterceptor('file', {
|
||||||
|
limits: { fileSize: 50 * 1024 * 1024 }, // 50 MB
|
||||||
|
}))
|
||||||
|
async uploadTemp(
|
||||||
|
@UploadedFile() file: Express.Multer.File,
|
||||||
|
@CurrentUser() user: User,
|
||||||
|
): Promise<{ tempId: string; expiresAt: string }> {
|
||||||
|
// 1. Validate MIME + extension (defense in depth)
|
||||||
|
this.fileValidator.assertAllowed(file);
|
||||||
|
|
||||||
|
// 2. Scan with ClamAV
|
||||||
|
const scanResult = await this.clamavService.scan(file.buffer);
|
||||||
|
if (!scanResult.clean) {
|
||||||
|
throw new BusinessException(
|
||||||
|
`ClamAV rejected: ${scanResult.signature}`,
|
||||||
|
'ไฟล์ไม่ปลอดภัย ระบบตรวจพบความเสี่ยง',
|
||||||
|
'กรุณาตรวจสอบไฟล์และลองใหม่อีกครั้ง',
|
||||||
|
'FILE_INFECTED',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// 3. Save to temp (encrypted at rest)
|
||||||
|
const tempId = await this.storageService.saveToTemp(file, user.id);
|
||||||
|
|
||||||
|
return {
|
||||||
|
tempId,
|
||||||
|
expiresAt: addHours(new Date(), 24).toISOString(),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Phase 2: Commit in Transaction
|
||||||
|
|
||||||
|
The business operation (e.g., creating a Correspondence) promotes temp files to permanent **in the same DB transaction**.
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
async createCorrespondence(dto: CreateCorrespondenceDto, user: User) {
|
||||||
|
return this.dataSource.transaction(async (manager) => {
|
||||||
|
// 1. Create domain entity
|
||||||
|
const entity = await manager.save(Correspondence, {
|
||||||
|
...dto,
|
||||||
|
createdById: user.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
// 2. Commit temp files → permanent (ACID together with entity)
|
||||||
|
await this.storageService.commitFiles(
|
||||||
|
dto.tempFileIds,
|
||||||
|
{ entityId: entity.id, entityType: 'correspondence' },
|
||||||
|
manager,
|
||||||
|
);
|
||||||
|
|
||||||
|
return entity;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If the transaction rolls back, temp files remain and expire in 24h — no orphaned permanent files.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## StorageService Contract
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export interface StorageService {
|
||||||
|
saveToTemp(file: Express.Multer.File, ownerId: number): Promise<string>;
|
||||||
|
commitFiles(
|
||||||
|
tempIds: string[],
|
||||||
|
target: { entityId: number; entityType: string },
|
||||||
|
manager: EntityManager,
|
||||||
|
): Promise<FileRecord[]>;
|
||||||
|
purgeExpiredTemp(): Promise<number>; // called by cron
|
||||||
|
getPermanentPath(fileId: number): Promise<string>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ❌ Forbidden
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// ❌ Direct write to permanent
|
||||||
|
fs.writeFileSync(`/var/storage/${file.originalname}`, file.buffer);
|
||||||
|
|
||||||
|
// ❌ Skip ClamAV
|
||||||
|
await this.storageService.savePermanent(file);
|
||||||
|
|
||||||
|
// ❌ Non-whitelist MIME
|
||||||
|
@UseInterceptors(FileInterceptor('file')) // no size or type limit
|
||||||
|
|
||||||
|
// ❌ Commit outside transaction
|
||||||
|
const entity = await this.repo.save(...);
|
||||||
|
await this.storageService.commitFiles(tempIds, ...); // race: entity exists, files may fail
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- [ADR-016 Security & Authentication](../../../../specs/06-Decision-Records/ADR-016-security-authentication.md)
|
||||||
|
- [Edge Cases](../../../../specs/01-Requirements/01-06-edge-cases-and-rules.md) — file upload scenarios
|
||||||
@@ -32,6 +32,7 @@ const CATEGORIES = [
|
|||||||
{ prefix: 'api-', name: 'API Design', impact: 'MEDIUM', section: 8 },
|
{ prefix: 'api-', name: 'API Design', impact: 'MEDIUM', section: 8 },
|
||||||
{ prefix: 'micro-', name: 'Microservices', impact: 'MEDIUM', section: 9 },
|
{ prefix: 'micro-', name: 'Microservices', impact: 'MEDIUM', section: 9 },
|
||||||
{ prefix: 'devops-', name: 'DevOps & Deployment', impact: 'LOW-MEDIUM', section: 10 },
|
{ prefix: 'devops-', name: 'DevOps & Deployment', impact: 'LOW-MEDIUM', section: 10 },
|
||||||
|
{ prefix: 'lcbp3-', name: 'LCBP3 Project-Specific', impact: 'CRITICAL', section: 11 },
|
||||||
];
|
];
|
||||||
|
|
||||||
interface RuleFrontmatter {
|
interface RuleFrontmatter {
|
||||||
@@ -50,8 +51,10 @@ interface Rule {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function parseFrontmatter(content: string): { frontmatter: RuleFrontmatter | null; body: string } {
|
function parseFrontmatter(content: string): { frontmatter: RuleFrontmatter | null; body: string } {
|
||||||
|
// Normalize CRLF → LF so the regex works on Windows-authored files
|
||||||
|
const normalized = content.replace(/\r\n/g, '\n');
|
||||||
const frontmatterRegex = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/;
|
const frontmatterRegex = /^---\n([\s\S]*?)\n---\n([\s\S]*)$/;
|
||||||
const match = content.match(frontmatterRegex);
|
const match = normalized.match(frontmatterRegex);
|
||||||
|
|
||||||
if (!match) {
|
if (!match) {
|
||||||
return { frontmatter: null, body: content };
|
return { frontmatter: null, body: content };
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
---
|
---
|
||||||
name: next-best-practices
|
name: next-best-practices
|
||||||
description: Next.js best practices - file conventions, RSC boundaries, data patterns, async APIs, metadata, error handling, route handlers, image/font optimization, bundling
|
description: Next.js best practices for LCBP3-DMS frontend. Enforces ADR-019 (publicId only, no parseInt/id fallback), TanStack Query + RHF + Zod, shadcn/ui, i18n, ADR-007 error UX, ADR-021 IntegratedBanner/WorkflowLifecycle, two-phase file upload.
|
||||||
|
version: 1.8.9
|
||||||
|
scope: frontend
|
||||||
user-invocable: false
|
user-invocable: false
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -157,6 +159,24 @@ See [parallel-routes.md](./parallel-routes.md) for:
|
|||||||
- `default.tsx` for fallbacks
|
- `default.tsx` for fallbacks
|
||||||
- Closing modals correctly with `router.back()`
|
- Closing modals correctly with `router.back()`
|
||||||
|
|
||||||
|
## i18n (Thai / English)
|
||||||
|
|
||||||
|
See [i18n.md](./i18n.md) for:
|
||||||
|
|
||||||
|
- `useTranslations('namespace')` pattern
|
||||||
|
- Key naming (kebab-case, feature-namespaced)
|
||||||
|
- When Zod messages stay inline vs i18n
|
||||||
|
- Server-side `userMessage` passthrough
|
||||||
|
|
||||||
|
## Two-Phase File Upload
|
||||||
|
|
||||||
|
See [two-phase-upload.md](./two-phase-upload.md) for:
|
||||||
|
|
||||||
|
- `useDropzone` + `useMutation` hook
|
||||||
|
- `tempFileIds` form-state pattern
|
||||||
|
- Whitelist MIME / max-size (must mirror backend)
|
||||||
|
- Clear-on-submit / expired-temp handling
|
||||||
|
|
||||||
## Self-Hosting
|
## Self-Hosting
|
||||||
|
|
||||||
See [self-hosting.md](./self-hosting.md) for:
|
See [self-hosting.md](./self-hosting.md) for:
|
||||||
@@ -204,28 +224,38 @@ const form = useForm({
|
|||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
### ADR-019 UUID Handling (CRITICAL)
|
### ADR-019 UUID Handling (CRITICAL — March 2026 Pattern)
|
||||||
|
|
||||||
|
> **Updated:** ใช้ `publicId` ตรงๆ — ห้ามใช้ `id ?? ''` fallback หรือ `uuid` ร่วม.
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
// Interface ต้องมีทั้ง id และ publicId
|
// ✅ CORRECT — Interface มีแค่ publicId
|
||||||
interface Contract {
|
interface Contract {
|
||||||
id?: number; // Internal (อาจ undefined)
|
publicId?: string; // UUID from API — ใช้ตัวนี้
|
||||||
publicId?: string; // UUID - ใช้ตัวนี้
|
|
||||||
contractCode: string;
|
contractCode: string;
|
||||||
|
contractName: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Select options - ใช้ pattern นี้เสมอ
|
// ✅ CORRECT — Select options (ไม่มี fallback)
|
||||||
const options = contracts.map((c) => ({
|
const options = contracts.map((c) => ({
|
||||||
label: `${c.contractName} (${c.contractCode})`,
|
label: `${c.contractName} (${c.contractCode})`,
|
||||||
value: String(c.publicId ?? c.id ?? ''), // fallback pattern
|
value: c.publicId ?? '', // ใช้ publicId ล้วน
|
||||||
key: String(c.publicId ?? c.id ?? ''),
|
key: c.publicId ?? c.contractCode, // fallback ไป business field ได้
|
||||||
}));
|
}));
|
||||||
|
|
||||||
// ❌ ห้ามใช้ parseInt บน UUID
|
// ❌ WRONG — pattern เก่า (ห้าม)
|
||||||
// const id = parseInt(projectId); // WRONG!
|
interface OldContract {
|
||||||
|
id?: number; // ❌ อย่า expose INT id
|
||||||
|
uuid?: string; // ❌ ใช้ชื่อ uuid
|
||||||
|
publicId?: string;
|
||||||
|
}
|
||||||
|
const oldValue = String(c.publicId ?? c.id ?? ''); // ❌ `id ?? ''` fallback ห้าม
|
||||||
|
|
||||||
// ✅ ส่ง UUID string ตรงๆ
|
// ❌ NEVER parseInt on UUID
|
||||||
apiClient.get(`/projects/${projectId}`); // projectId is UUID string
|
// const badId = parseInt(projectPublicId); // "019505..." → 19 (WRONG!)
|
||||||
|
|
||||||
|
// ✅ ส่ง UUID string ตรงๆ ไป API
|
||||||
|
apiClient.get(`/projects/${projectPublicId}`);
|
||||||
```
|
```
|
||||||
|
|
||||||
### Naming Conventions
|
### Naming Conventions
|
||||||
@@ -312,13 +342,17 @@ apiClient.interceptors.request.use((config) => {
|
|||||||
|
|
||||||
### Anti-Patterns (ห้ามทำ)
|
### Anti-Patterns (ห้ามทำ)
|
||||||
|
|
||||||
- ❌ Fetch data ใน useEffect โดยตรง
|
- ❌ Fetch data ใน useEffect โดยตรง (ใช้ TanStack Query)
|
||||||
- ❌ Props drilling ลึกเกิน 3 levels
|
- ❌ Props drilling ลึกเกิน 3 levels
|
||||||
- ❌ Inline styles (ใช้ Tailwind)
|
- ❌ Inline styles (ใช้ Tailwind)
|
||||||
- ❌ console.log ใน production
|
- ❌ `console.log` ใน committed code
|
||||||
- ❌ parseInt() บน UUID values
|
- ❌ `parseInt()` / `Number()` / `+` บน UUID values (ADR-019)
|
||||||
|
- ❌ `id ?? ''` fallback บน `publicId` (ใช้ `publicId ?? ''` หรือ fallback ไป business field)
|
||||||
|
- ❌ Expose `uuid` คู่กับ `publicId` ใน interface (ใช้ `publicId` อย่างเดียว)
|
||||||
- ❌ ใช้ index เป็น key ใน list
|
- ❌ ใช้ index เป็น key ใน list
|
||||||
- ❌ Snake_case ใน form field names (ใช้ camelCase)
|
- ❌ Snake_case ใน form field names (ใช้ camelCase)
|
||||||
|
- ❌ Hardcode Thai/English string ใน component (ใช้ i18n keys)
|
||||||
|
- ❌ `any` type (strict mode)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,79 @@
|
|||||||
|
# i18n (Thai / English)
|
||||||
|
|
||||||
|
LCBP3 frontend **must not** hardcode Thai or English UI strings in components.
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
1. **All user-facing strings go through the i18n layer** (`next-intl` / `i18next` — check `frontend/package.json`).
|
||||||
|
2. **Keys use kebab-case**, namespaced by feature:
|
||||||
|
- `correspondence.list.title`
|
||||||
|
- `correspondence.form.submit`
|
||||||
|
- `common.actions.cancel`
|
||||||
|
3. **Comments in code remain Thai** (business logic explanation); **only UI copy** goes through i18n.
|
||||||
|
4. **Error messages** from backend (via ADR-007 `userMessage`) are already localized server-side — render them directly, don't translate client-side.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ❌ Wrong
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
export function CorrespondenceHeader() {
|
||||||
|
return <h1>รายการหนังสือติดต่อ</h1>; // ❌ hardcoded Thai
|
||||||
|
}
|
||||||
|
|
||||||
|
toast.success('บันทึกสำเร็จ'); // ❌ hardcoded
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## ✅ Right
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
import { useTranslations } from 'next-intl';
|
||||||
|
|
||||||
|
export function CorrespondenceHeader() {
|
||||||
|
const t = useTranslations('correspondence.list');
|
||||||
|
return <h1>{t('title')}</h1>;
|
||||||
|
}
|
||||||
|
|
||||||
|
toast.success(t('save.success'));
|
||||||
|
```
|
||||||
|
|
||||||
|
Translation files:
|
||||||
|
|
||||||
|
```json
|
||||||
|
// messages/th.json
|
||||||
|
{
|
||||||
|
"correspondence": {
|
||||||
|
"list": { "title": "รายการหนังสือติดต่อ" },
|
||||||
|
"save": { "success": "บันทึกสำเร็จ" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// messages/en.json
|
||||||
|
{
|
||||||
|
"correspondence": {
|
||||||
|
"list": { "title": "Correspondence List" },
|
||||||
|
"save": { "success": "Saved successfully" }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Zod Error Messages
|
||||||
|
|
||||||
|
Zod error messages shown in forms **do** stay in Thai inline (per `specs/05-Engineering-Guidelines/05-03-frontend-guidelines.md`), because they're schema-bound and rarely need translation. If dual-language support becomes required, wrap with an i18n-aware resolver:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
const schema = z.object({
|
||||||
|
projectUuid: z.string().uuid(t('validation.project.required')),
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- [i18n Guidelines](../../../specs/05-Engineering-Guidelines/05-08-i18n-guidelines.md)
|
||||||
|
- [Frontend Guidelines](../../../specs/05-Engineering-Guidelines/05-03-frontend-guidelines.md)
|
||||||
@@ -0,0 +1,100 @@
|
|||||||
|
# Two-Phase File Upload (Frontend)
|
||||||
|
|
||||||
|
Pair with [backend two-phase upload rule](../nestjs-best-practices/rules/security-file-two-phase-upload.md).
|
||||||
|
|
||||||
|
## Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
User drops file
|
||||||
|
→ POST /files/upload (temp) → { tempId, expiresAt }
|
||||||
|
→ store tempId in form state
|
||||||
|
→ user submits form
|
||||||
|
→ POST /correspondences (with tempFileIds) → backend commits in transaction
|
||||||
|
```
|
||||||
|
|
||||||
|
## Hook Pattern
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
'use client';
|
||||||
|
|
||||||
|
import { useDropzone } from 'react-dropzone';
|
||||||
|
import { useMutation } from '@tanstack/react-query';
|
||||||
|
|
||||||
|
export function useTwoPhaseUpload() {
|
||||||
|
const uploadTemp = useMutation({
|
||||||
|
mutationFn: async (file: File) => {
|
||||||
|
const fd = new FormData();
|
||||||
|
fd.append('file', file);
|
||||||
|
const { data } = await apiClient.post<{ tempId: string; expiresAt: string }>(
|
||||||
|
'/files/upload',
|
||||||
|
fd,
|
||||||
|
);
|
||||||
|
return data;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
return uploadTemp;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Form Integration (RHF)
|
||||||
|
|
||||||
|
```tsx
|
||||||
|
export function CorrespondenceForm() {
|
||||||
|
const form = useForm<FormData>({ resolver: zodResolver(schema) });
|
||||||
|
const uploadTemp = useTwoPhaseUpload();
|
||||||
|
const [tempFileIds, setTempFileIds] = useState<string[]>([]);
|
||||||
|
|
||||||
|
const { getRootProps, getInputProps } = useDropzone({
|
||||||
|
accept: {
|
||||||
|
'application/pdf': ['.pdf'],
|
||||||
|
'image/vnd.dwg': ['.dwg'],
|
||||||
|
'application/vnd.openxmlformats-officedocument.wordprocessingml.document': ['.docx'],
|
||||||
|
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet': ['.xlsx'],
|
||||||
|
'application/zip': ['.zip'],
|
||||||
|
},
|
||||||
|
maxSize: 50 * 1024 * 1024, // 50 MB — must match backend
|
||||||
|
onDrop: async (files) => {
|
||||||
|
const results = await Promise.all(files.map((f) => uploadTemp.mutateAsync(f)));
|
||||||
|
setTempFileIds((prev) => [...prev, ...results.map((r) => r.tempId)]);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const onSubmit = async (values: FormData) => {
|
||||||
|
await correspondenceService.create({
|
||||||
|
...values,
|
||||||
|
tempFileIds, // committed server-side in the same DB transaction
|
||||||
|
});
|
||||||
|
setTempFileIds([]);
|
||||||
|
};
|
||||||
|
|
||||||
|
return (
|
||||||
|
<form onSubmit={form.handleSubmit(onSubmit)}>
|
||||||
|
<div {...getRootProps()} className="dropzone">
|
||||||
|
<input {...getInputProps()} />
|
||||||
|
<p>{t('upload.dragDrop')}</p>
|
||||||
|
</div>
|
||||||
|
{/* other fields */}
|
||||||
|
</form>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Rules
|
||||||
|
|
||||||
|
- **Whitelist MIME types** — must mirror backend ADR-016 whitelist (`.pdf`, `.dwg`, `.docx`, `.xlsx`, `.zip`).
|
||||||
|
- **50 MB cap** — enforce client-side too (better UX) plus server-side (authoritative).
|
||||||
|
- **Show temp-file pills** with remove button — users see what will be attached.
|
||||||
|
- **Clear `tempFileIds` on success/cancel** — prevent stale IDs on subsequent submits.
|
||||||
|
- **No retry of expired temps** — if `expiresAt` passed, prompt re-upload.
|
||||||
|
|
||||||
|
## ❌ Forbidden
|
||||||
|
|
||||||
|
- ❌ Uploading directly to permanent storage endpoint (no commit phase)
|
||||||
|
- ❌ Hardcoded MIME list in component (keep in shared constant file mirrored from backend)
|
||||||
|
- ❌ Ignoring `maxSize` — backend will reject but UX suffers
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- [ADR-016 Security](../../../specs/06-Decision-Records/ADR-016-security-authentication.md)
|
||||||
|
- Backend rule: [`security-file-two-phase-upload.md`](../nestjs-best-practices/rules/security-file-two-phase-upload.md)
|
||||||
@@ -1,17 +1,19 @@
|
|||||||
# UUID Handling (ADR-019)
|
# UUID Handling (ADR-019) — March 2026 Pattern
|
||||||
|
|
||||||
**Project-specific: Hybrid Identifier Strategy for NAP-DMS**
|
**Project-specific: Hybrid Identifier Strategy for NAP-DMS**
|
||||||
|
|
||||||
This project uses ADR-019: INT Primary Key (internal) + UUIDv7 (public API). Frontend code must handle this correctly.
|
This project uses ADR-019: INT Primary Key (internal) + UUIDv7 (public API). Frontend code must handle this correctly.
|
||||||
|
|
||||||
|
> **Updated pattern:** Backend exposes `publicId` directly — ไม่มี `@Expose({ name: 'id' })` rename แล้ว. Frontend ใช้ `publicId` ตรงๆ — ห้าม fallback ไป `id`.
|
||||||
|
|
||||||
## The Pattern
|
## The Pattern
|
||||||
|
|
||||||
| Source | Field Name | Type | Notes |
|
| Source | Field Name | Type | Notes |
|
||||||
|--------|------------|------|-------|
|
| ------------------------ | ------------------- | ----------------- | ----------------------------------------------------------- |
|
||||||
| **API Response** | `id` | `string` (UUID) | Actually `publicId` exposed via `@Expose({ name: 'id' })` |
|
| **API Response** | `publicId` | `string` (UUIDv7) | Exposed directly (no rename) |
|
||||||
| **TypeScript Interface** | `publicId?: string` | UUID string | Use this for all references |
|
| **TypeScript Interface** | `publicId?: string` | UUID string | ใช้ตัวนี้เท่านั้น |
|
||||||
| **Fallback** | `id?: number` | INT (internal) | May be undefined due to `@Exclude()` |
|
| **Form DTO** | `xxxUuid` | `string` | DTO field names: `projectUuid`, `contractUuid` (input only) |
|
||||||
| **Form Values** | `xxxUuid` | `string` | DTO field names: `projectUuid`, `contractUuid` |
|
| **URL param** | `[publicId]` | `string` (UUID) | e.g. `/correspondences/[publicId]/page.tsx` |
|
||||||
|
|
||||||
## Critical Rules
|
## Critical Rules
|
||||||
|
|
||||||
@@ -31,22 +33,26 @@ const id = +projectId; // NaN
|
|||||||
apiClient.get(`/projects/${projectId}`); // projectId is already UUID string
|
apiClient.get(`/projects/${projectId}`); // projectId is already UUID string
|
||||||
```
|
```
|
||||||
|
|
||||||
### 2. Use `publicId ?? id` Pattern
|
### 2. Use `publicId` Only — NO `id ?? ''` Fallback
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
// types/project.ts
|
// ✅ CORRECT — types/project.ts
|
||||||
interface Project {
|
interface Project {
|
||||||
id?: number; // Internal INT (may be undefined)
|
publicId?: string; // UUID from API — ใช้ตัวนี้เท่านั้น
|
||||||
publicId?: string; // UUID from API (use this)
|
|
||||||
projectCode: string;
|
projectCode: string;
|
||||||
projectName: string;
|
projectName: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Component usage
|
// ✅ CORRECT — Component usage
|
||||||
const projectOptions = projects.map((p) => ({
|
const projectOptions = projects.map((p) => ({
|
||||||
label: `${p.projectName} (${p.projectCode})`,
|
label: `${p.projectName} (${p.projectCode})`,
|
||||||
value: String(p.publicId ?? p.id ?? ''), // ADR-019 pattern
|
value: p.publicId ?? '', // ADR-019 — ไม่ต้อง String() และไม่ไป id
|
||||||
key: String(p.publicId ?? p.id ?? ''),
|
key: p.publicId ?? p.projectCode, // fallback ไป business field ได้
|
||||||
|
}));
|
||||||
|
|
||||||
|
// ❌ WRONG — pattern เก่า
|
||||||
|
const oldOptions = projects.map((p) => ({
|
||||||
|
value: String(p.publicId ?? p.id ?? ''), // ❌ `id ?? ''` fallback
|
||||||
}));
|
}));
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -84,14 +90,13 @@ export function ContractSelect({ contracts, value, onChange }: ContractSelectPro
|
|||||||
<SelectValue placeholder="เลือกสัญญา" />
|
<SelectValue placeholder="เลือกสัญญา" />
|
||||||
</SelectTrigger>
|
</SelectTrigger>
|
||||||
<SelectContent>
|
<SelectContent>
|
||||||
{contracts.map((c) => (
|
{contracts
|
||||||
<SelectItem
|
.filter((c) => !!c.publicId) // กรอง contract ที่มี publicId เท่านั้น
|
||||||
key={String(c.publicId ?? c.id ?? '')}
|
.map((c) => (
|
||||||
value={String(c.publicId ?? c.id ?? '')}
|
<SelectItem key={c.publicId} value={c.publicId!}>
|
||||||
>
|
{c.contractName} ({c.contractCode})
|
||||||
{c.contractName} ({c.contractCode})
|
</SelectItem>
|
||||||
</SelectItem>
|
))}
|
||||||
))}
|
|
||||||
</SelectContent>
|
</SelectContent>
|
||||||
</Select>
|
</Select>
|
||||||
);
|
);
|
||||||
@@ -113,7 +118,9 @@ const columns: ColumnDef<Discipline>[] = [
|
|||||||
cell: ({ row }) => {
|
cell: ({ row }) => {
|
||||||
const contract = row.original.contract;
|
const contract = row.original.contract;
|
||||||
return contract ? (
|
return contract ? (
|
||||||
<span>{contract.contractName} ({contract.contractCode})</span>
|
<span>
|
||||||
|
{contract.contractName} ({contract.contractCode})
|
||||||
|
</span>
|
||||||
) : (
|
) : (
|
||||||
<span className="text-muted-foreground">-</span>
|
<span className="text-muted-foreground">-</span>
|
||||||
);
|
);
|
||||||
@@ -153,10 +160,9 @@ export const contractService = {
|
|||||||
## TypeScript Interfaces
|
## TypeScript Interfaces
|
||||||
|
|
||||||
```tsx
|
```tsx
|
||||||
// types/entities.ts
|
// ✅ CORRECT — types/entities.ts
|
||||||
export interface BaseEntity {
|
export interface BaseEntity {
|
||||||
id?: number; // Internal INT - may be undefined
|
publicId?: string; // UUID — ใช้ตัวนี้เท่านั้น (ไม่มี INT id ใน interface)
|
||||||
publicId?: string; // UUID - use this for API calls
|
|
||||||
createdAt?: string;
|
createdAt?: string;
|
||||||
updatedAt?: string;
|
updatedAt?: string;
|
||||||
}
|
}
|
||||||
@@ -170,14 +176,12 @@ export interface Project extends BaseEntity {
|
|||||||
export interface Contract extends BaseEntity {
|
export interface Contract extends BaseEntity {
|
||||||
contractCode: string;
|
contractCode: string;
|
||||||
contractName: string;
|
contractName: string;
|
||||||
projectId?: number; // Internal INT FK
|
project?: Project; // Relation (nested entity)
|
||||||
projectUuid?: string; // UUID for DTOs
|
|
||||||
project?: Project; // Relation
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// DTOs
|
// DTO (input only — รับ UUID จาก form)
|
||||||
export interface CreateContractDto {
|
export interface CreateContractDto {
|
||||||
projectUuid: string; // Accept UUID from form
|
projectUuid: string; // UUID string from select
|
||||||
contractCode: string;
|
contractCode: string;
|
||||||
contractName: string;
|
contractName: string;
|
||||||
}
|
}
|
||||||
@@ -215,9 +219,7 @@ export function ContractForm() {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<Form {...form}>
|
<Form {...form}>
|
||||||
<form onSubmit={form.handleSubmit(onSubmit)}>
|
<form onSubmit={form.handleSubmit(onSubmit)}>{/* Form fields */}</form>
|
||||||
{/* Form fields */}
|
|
||||||
</form>
|
|
||||||
</Form>
|
</Form>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -238,12 +240,13 @@ export default async function ContractPage({ params }: { params: Promise<{ id: s
|
|||||||
|
|
||||||
## Common Pitfalls
|
## Common Pitfalls
|
||||||
|
|
||||||
| Pitfall | Wrong | Right |
|
| Pitfall | ❌ Wrong | ✅ Right |
|
||||||
|---------|-------|-------|
|
| ---------------------------- | ------------------------------------------------ | --------------------------------- |
|
||||||
| Assuming `entity.id` exists | `key={entity.id}` | `key={entity.publicId ?? entity.id}` |
|
| Using INT `id` | `key={entity.id}` | `key={entity.publicId}` |
|
||||||
| parseInt on UUID | `parseInt(projectId)` | `projectId` (string) |
|
| parseInt on UUID | `parseInt(projectId)` | `projectId` (string) |
|
||||||
| Field name mismatch | `name="project_id"` | `name="projectUuid"` |
|
| Field name mismatch | `name="project_id"` | `name="projectUuid"` |
|
||||||
| Missing fallback | `value={entity.publicId}` | `value={entity.publicId ?? entity.id ?? ''}` |
|
| `id ?? ''` fallback | `value={publicId ?? id ?? ''}` | `value={publicId ?? ''}` |
|
||||||
|
| `uuid` + `publicId` together | `interface { uuid?: string; publicId?: string }` | `interface { publicId?: string }` |
|
||||||
|
|
||||||
## Reference
|
## Reference
|
||||||
|
|
||||||
|
|||||||
+17
-14
@@ -1,17 +1,20 @@
|
|||||||
# 🧠 NAP-DMS Agent Skills (v1.8.6)
|
# 🧠 NAP-DMS Agent Skills (v1.8.9)
|
||||||
|
|
||||||
ไฟล์นี้กำหนดทักษะและความสามารถเฉพาะทางของ Document Intelligence Engine สำหรับโครงการ LCBP3 v1.8.6 เพื่อรักษามาตรฐานสูงสุดด้าน Security และ Data Integrity
|
ไฟล์นี้กำหนดทักษะและความสามารถเฉพาะทางของ Document Intelligence Engine สำหรับโครงการ LCBP3 v1.8.9 เพื่อรักษามาตรฐานสูงสุดด้าน Security และ Data Integrity
|
||||||
|
|
||||||
**Status**: Production Ready | **Last Updated**: 2026-04-14 | **Total Skills**: 20
|
**Status**: Production Ready | **Last Updated**: 2026-04-22 | **Total Skills**: 20
|
||||||
|
|
||||||
|
> 📌 Shared context for all speckit-\* skills: see [`_LCBP3-CONTEXT.md`](./_LCBP3-CONTEXT.md).
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🏗️ Architectural & Data Integrity
|
## 🏗️ Architectural & Data Integrity
|
||||||
|
|
||||||
- **Identifier Strategy Mastery (ADR-019):**
|
- **Identifier Strategy Mastery (ADR-019 — March 2026):**
|
||||||
- บังคับใช้ **UUIDv7** เป็น Public ID ใน API และ URL เสมอ
|
- บังคับใช้ **UUIDv7** เป็น Public ID; entity สืบทอดจาก `UuidBaseEntity` และเปิด `publicId` **ตรงๆ** (ห้ามใช้ `@Expose({ name: 'id' })` rename)
|
||||||
- ตรวจสอบและป้องกันการใช้ `parseInt()`, `Number()`, หรือตัวดำเนินการทางคณิตศาสตร์ (`+`) กับ UUID
|
- ตรวจสอบและป้องกันการใช้ `parseInt()`, `Number()`, หรือ `+` กับ UUID ทั้ง backend/frontend
|
||||||
- ตรวจสอบว่า Entity มีการใช้ `@Exclude()` บน Primary Key ที่เป็น `INT AUTO_INCREMENT` เพื่อไม่ให้หลุดออกไปยัง API
|
- ตรวจสอบว่า Entity มีการใช้ `@Exclude()` บน Primary Key `INT AUTO_INCREMENT` เพื่อไม่ให้หลุดออกไปยัง API
|
||||||
|
- Frontend ใช้ `publicId` ตรงๆ — **ห้าม** `id ?? ''` fallback หรือมี `uuid?: string` คู่กับ `publicId` ใน interface
|
||||||
- **Strict Validation Engine:**
|
- **Strict Validation Engine:**
|
||||||
- บังคับใช้ **Zod** สำหรับการทำ Form Validation ฝั่ง Frontend
|
- บังคับใช้ **Zod** สำหรับการทำ Form Validation ฝั่ง Frontend
|
||||||
- บังคับใช้ **class-validator** สำหรับ Backend DTOs
|
- บังคับใช้ **class-validator** สำหรับ Backend DTOs
|
||||||
@@ -81,22 +84,22 @@
|
|||||||
|
|
||||||
## 🛠️ Skill Health Monitoring
|
## 🛠️ Skill Health Monitoring
|
||||||
|
|
||||||
### Health Check Scripts
|
### Health Check Scripts (from repo root)
|
||||||
|
|
||||||
- **Bash**: `./scripts/bash/audit-skills.sh` - Comprehensive skill health audit
|
- **Bash**: `./.agents/scripts/bash/audit-skills.sh` - Comprehensive skill health audit
|
||||||
- **PowerShell**: `./scripts/powershell/audit-skills.ps1` - Windows equivalent
|
- **PowerShell**: `./.agents/scripts/powershell/audit-skills.ps1` - Windows equivalent
|
||||||
|
|
||||||
### Validation Scripts
|
### Validation Scripts
|
||||||
|
|
||||||
- **Version Check**: `./scripts/bash/validate-versions.sh` - Ensure version consistency
|
- **Version Check**: `./.agents/scripts/bash/validate-versions.sh` - Ensure version consistency
|
||||||
- **Workflow Sync**: `./scripts/bash/sync-workflows.sh` - Verify workflow integration
|
- **Workflow Sync**: `./.agents/scripts/bash/sync-workflows.sh` - Verify workflow integration
|
||||||
|
|
||||||
### Health Metrics
|
### Health Metrics
|
||||||
|
|
||||||
- **Total Skills**: 20 implemented
|
- **Total Skills**: 20 implemented
|
||||||
- **Version Alignment**: v1.8.6 across all skills
|
- **Version Alignment**: v1.8.9 across all skills
|
||||||
- **Template Coverage**: 100% for skills requiring templates
|
- **Template Coverage**: 100% for skills requiring templates
|
||||||
- **Documentation**: Complete front matter and sections
|
- **Documentation**: Complete front matter + shared `_LCBP3-CONTEXT.md` appendix
|
||||||
|
|
||||||
### Maintenance Schedule
|
### Maintenance Schedule
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-analyze
|
name: speckit-analyze
|
||||||
description: Perform a non-destructive cross-artifact consistency and quality analysis across spec.md, plan.md, and tasks.md after task generation.
|
description: Perform a non-destructive cross-artifact consistency and quality analysis across spec.md, plan.md, and tasks.md after task generation.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-tasks
|
- speckit-tasks
|
||||||
---
|
---
|
||||||
@@ -28,7 +28,7 @@ Identify inconsistencies, duplications, ambiguities, and underspecified items ac
|
|||||||
|
|
||||||
**STRICTLY READ-ONLY**: Do **not** modify any files. Output a structured analysis report. Offer an optional remediation plan (user must explicitly approve before any follow-up editing commands would be invoked manually).
|
**STRICTLY READ-ONLY**: Do **not** modify any files. Output a structured analysis report. Offer an optional remediation plan (user must explicitly approve before any follow-up editing commands would be invoked manually).
|
||||||
|
|
||||||
**Constitution Authority**: The project constitution (`.specify/memory/constitution.md`) is **non-negotiable** within this analysis scope. Constitution conflicts are automatically CRITICAL and require adjustment of the spec, plan, or tasks—not dilution, reinterpretation, or silent ignoring of the principle. If a principle itself needs to change, that must occur in a separate, explicit constitution update outside `/speckit-analyze`.
|
**Constitution Authority**: The project constitution (`AGENTS.md`) is **non-negotiable** within this analysis scope. Constitution conflicts are automatically CRITICAL and require adjustment of the spec, plan, or tasks—not dilution, reinterpretation, or silent ignoring of the principle. If a principle itself needs to change, that must occur in a separate, explicit constitution update outside `/speckit-analyze`.
|
||||||
|
|
||||||
### Steps
|
### Steps
|
||||||
|
|
||||||
@@ -72,7 +72,7 @@ Load only the minimal necessary context from each artifact:
|
|||||||
|
|
||||||
**From constitution:**
|
**From constitution:**
|
||||||
|
|
||||||
- Load `.specify/memory/constitution.md` for principle validation
|
- Load `AGENTS.md` for principle validation
|
||||||
|
|
||||||
### 3. Build Semantic Models
|
### 3. Build Semantic Models
|
||||||
|
|
||||||
@@ -192,3 +192,15 @@ Ask the user: "Would you like me to suggest concrete remediation edits for the t
|
|||||||
## Context
|
## Context
|
||||||
|
|
||||||
{{args}}
|
{{args}}
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-checker
|
name: speckit-checker
|
||||||
description: Run static analysis tools and aggregate results.
|
description: Run static analysis tools and aggregate results.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on: []
|
depends-on: []
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -157,3 +157,15 @@ Auto-detect available tools, run them, and aggregate results into a prioritized
|
|||||||
- **Be Actionable**: Every issue should have a clear fix path
|
- **Be Actionable**: Every issue should have a clear fix path
|
||||||
- **Don't Duplicate**: Dedupe issues found by multiple tools
|
- **Don't Duplicate**: Dedupe issues found by multiple tools
|
||||||
- **Respect Configs**: Honor project's existing linter configs
|
- **Respect Configs**: Honor project's existing linter configs
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-checklist
|
name: speckit-checklist
|
||||||
description: Generate a custom checklist for the current feature based on user requirements.
|
description: Generate a custom checklist for the current feature based on user requirements.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
---
|
---
|
||||||
|
|
||||||
## Checklist Purpose: "Unit Tests for English"
|
## Checklist Purpose: "Unit Tests for English"
|
||||||
@@ -300,3 +300,15 @@ Sample items:
|
|||||||
- Correct: Validation of requirement quality
|
- Correct: Validation of requirement quality
|
||||||
- Wrong: "Does it do X?"
|
- Wrong: "Does it do X?"
|
||||||
- Correct: "Is X clearly specified?"
|
- Correct: "Is X clearly specified?"
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-clarify
|
name: speckit-clarify
|
||||||
description: Identify underspecified areas in the current feature spec by asking up to 5 highly targeted clarification questions and encoding answers back into the spec.
|
description: Identify underspecified areas in the current feature spec by asking up to 5 highly targeted clarification questions and encoding answers back into the spec.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-specify
|
- speckit-specify
|
||||||
handoffs:
|
handoffs:
|
||||||
@@ -189,3 +189,15 @@ Behavior rules:
|
|||||||
- If quota reached with unresolved high-impact categories remaining, explicitly flag them under Deferred with rationale.
|
- If quota reached with unresolved high-impact categories remaining, explicitly flag them under Deferred with rationale.
|
||||||
|
|
||||||
Context for prioritization: {{args}}
|
Context for prioritization: {{args}}
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-constitution
|
name: speckit-constitution
|
||||||
description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync.
|
description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
handoffs:
|
handoffs:
|
||||||
- label: Build Specification
|
- label: Build Specification
|
||||||
agent: speckit-specify
|
agent: speckit-specify
|
||||||
@@ -24,11 +24,11 @@ You are the **Antigravity Governance Architect**. Your role is to establish and
|
|||||||
|
|
||||||
### Outline
|
### Outline
|
||||||
|
|
||||||
You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.
|
You are updating the project constitution at `AGENTS.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.
|
||||||
|
|
||||||
Follow this execution flow:
|
Follow this execution flow:
|
||||||
|
|
||||||
1. Load the existing constitution template at `memory/constitution.md`.
|
1. Load the existing constitution template at `AGENTS.md`.
|
||||||
- Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`.
|
- Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`.
|
||||||
**IMPORTANT**: The user might require less or more principles than the ones used in the template. If a number is specified, respect that - follow the general template. You will update the doc accordingly.
|
**IMPORTANT**: The user might require less or more principles than the ones used in the template. If a number is specified, respect that - follow the general template. You will update the doc accordingly.
|
||||||
|
|
||||||
@@ -49,10 +49,10 @@ Follow this execution flow:
|
|||||||
- Ensure Governance section lists amendment procedure, versioning policy, and compliance review expectations.
|
- Ensure Governance section lists amendment procedure, versioning policy, and compliance review expectations.
|
||||||
|
|
||||||
4. Consistency propagation checklist (convert prior checklist into active validations):
|
4. Consistency propagation checklist (convert prior checklist into active validations):
|
||||||
- Read `.specify/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles.
|
- Read `.agents/skills/speckit-plan/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles.
|
||||||
- Read `.specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints.
|
- Read `.agents/skills/speckit-specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints.
|
||||||
- Read `.specify/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline).
|
- Read `.agents/skills/speckit-tasks/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline).
|
||||||
- Read each command file in `.specify/templates/commands/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required.
|
- Read each command file in `.agents/skills/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required.
|
||||||
- Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to principles changed.
|
- Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to principles changed.
|
||||||
|
|
||||||
5. Produce a Sync Impact Report (prepend as an HTML comment at top of the constitution file after update):
|
5. Produce a Sync Impact Report (prepend as an HTML comment at top of the constitution file after update):
|
||||||
@@ -69,7 +69,7 @@ Follow this execution flow:
|
|||||||
- Dates ISO format YYYY-MM-DD.
|
- Dates ISO format YYYY-MM-DD.
|
||||||
- Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD rationale where appropriate).
|
- Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD rationale where appropriate).
|
||||||
|
|
||||||
7. Write the completed constitution back to `.specify/memory/constitution.md` (overwrite).
|
7. Write the completed constitution back to `AGENTS.md` (overwrite).
|
||||||
|
|
||||||
8. Output a final summary to the user with:
|
8. Output a final summary to the user with:
|
||||||
- New version and bump rationale.
|
- New version and bump rationale.
|
||||||
@@ -87,4 +87,16 @@ If the user supplies partial updates (e.g., only one principle revision), still
|
|||||||
|
|
||||||
If critical info missing (e.g., ratification date truly unknown), insert `TODO(<FIELD_NAME>): explanation` and include in the Sync Impact Report under deferred items.
|
If critical info missing (e.g., ratification date truly unknown), insert `TODO(<FIELD_NAME>): explanation` and include in the Sync Impact Report under deferred items.
|
||||||
|
|
||||||
Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file.
|
Do not create a new template; always operate on the existing `AGENTS.md` file.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-diff
|
name: speckit-diff
|
||||||
description: Compare two versions of a spec or plan to highlight changes.
|
description: Compare two versions of a spec or plan to highlight changes.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on: []
|
depends-on: []
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -84,3 +84,15 @@ Compare two versions of a specification artifact and produce a structured diff r
|
|||||||
- **Highlight Impact**: Explain what each change means for implementation
|
- **Highlight Impact**: Explain what each change means for implementation
|
||||||
- **Flag Breaking Changes**: Any change that invalidates existing work
|
- **Flag Breaking Changes**: Any change that invalidates existing work
|
||||||
- **Ignore Whitespace**: Focus on semantic changes, not formatting
|
- **Ignore Whitespace**: Focus on semantic changes, not formatting
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-implement
|
name: speckit-implement
|
||||||
description: Execute the implementation plan by processing and executing all tasks defined in tasks.md (with Ironclad Anti-Regression Protocols)
|
description: Execute the implementation plan by processing and executing all tasks defined in tasks.md (with Ironclad Anti-Regression Protocols)
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-tasks
|
- speckit-tasks
|
||||||
---
|
---
|
||||||
@@ -81,7 +81,7 @@ At the start of execution and after every 3 modifications:
|
|||||||
|
|
||||||
### Outline
|
### Outline
|
||||||
|
|
||||||
1. Run `.specify/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").
|
1. Run `../scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").
|
||||||
|
|
||||||
2. **Check checklists status** (if FEATURE_DIR/checklists/ exists):
|
2. **Check checklists status** (if FEATURE_DIR/checklists/ exists):
|
||||||
- Scan all checklist files in the checklists/ directory
|
- Scan all checklist files in the checklists/ directory
|
||||||
@@ -246,3 +246,15 @@ At the start of execution and after every 3 modifications:
|
|||||||
---
|
---
|
||||||
|
|
||||||
Note: This command assumes a complete task breakdown exists in tasks.md. If tasks are incomplete or missing, suggest running `/speckit-tasks` first to regenerate the task list.
|
Note: This command assumes a complete task breakdown exists in tasks.md. If tasks are incomplete or missing, suggest running `/speckit-tasks` first to regenerate the task list.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-migrate
|
name: speckit-migrate
|
||||||
description: Migrate existing projects into the speckit structure by generating spec.md, plan.md, and tasks.md from existing code.
|
description: Migrate existing projects into the speckit structure by generating spec.md, plan.md, and tasks.md from existing code.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on: []
|
depends-on: []
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -116,3 +116,15 @@ Analyze an existing codebase and generate speckit artifacts (spec.md, plan.md, t
|
|||||||
- **Preserve Intent**: Use code comments and naming to understand purpose
|
- **Preserve Intent**: Use code comments and naming to understand purpose
|
||||||
- **Flag TODOs**: Any TODO/FIXME/HACK in code becomes an open task
|
- **Flag TODOs**: Any TODO/FIXME/HACK in code becomes an open task
|
||||||
- **Be Conservative**: When unsure, ask rather than assume
|
- **Be Conservative**: When unsure, ask rather than assume
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-plan
|
name: speckit-plan
|
||||||
description: Execute the implementation planning workflow using the plan template to generate design artifacts.
|
description: Execute the implementation planning workflow using the plan template to generate design artifacts.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-specify
|
- speckit-specify
|
||||||
handoffs:
|
handoffs:
|
||||||
@@ -32,7 +32,7 @@ You are the **Antigravity System Architect**. Your role is to bridge the gap bet
|
|||||||
|
|
||||||
1. **Setup**: Run `../scripts/bash/setup-plan.sh --json` from repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").
|
1. **Setup**: Run `../scripts/bash/setup-plan.sh --json` from repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. For single quotes in args like "I'm Groot", use escape syntax: e.g 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").
|
||||||
|
|
||||||
2. **Load context**: Read FEATURE_SPEC and `.specify/memory/constitution.md`. Load IMPL_PLAN template from `templates/plan-template.md`.
|
2. **Load context**: Read FEATURE_SPEC and `AGENTS.md`. Load IMPL_PLAN template from `templates/plan-template.md`.
|
||||||
|
|
||||||
3. **Execute plan workflow**: Follow the structure in IMPL_PLAN template to:
|
3. **Execute plan workflow**: Follow the structure in IMPL_PLAN template to:
|
||||||
- Fill Technical Context (mark unknowns as "NEEDS CLARIFICATION")
|
- Fill Technical Context (mark unknowns as "NEEDS CLARIFICATION")
|
||||||
@@ -85,7 +85,7 @@ You are the **Antigravity System Architect**. Your role is to bridge the gap bet
|
|||||||
- Output OpenAPI/GraphQL schema to `/contracts/`
|
- Output OpenAPI/GraphQL schema to `/contracts/`
|
||||||
|
|
||||||
3. **Agent context update**:
|
3. **Agent context update**:
|
||||||
- Run `../scripts/bash/update-agent-context.sh gemini`
|
- Run `../scripts/bash/update-agent-context.sh windsurf`
|
||||||
- These scripts detect which AI agent is in use
|
- These scripts detect which AI agent is in use
|
||||||
- Update the appropriate agent-specific context file
|
- Update the appropriate agent-specific context file
|
||||||
- Add only new technology from current plan
|
- Add only new technology from current plan
|
||||||
@@ -97,3 +97,15 @@ You are the **Antigravity System Architect**. Your role is to bridge the gap bet
|
|||||||
|
|
||||||
- Use absolute paths
|
- Use absolute paths
|
||||||
- ERROR on gate failures or unresolved clarifications
|
- ERROR on gate failures or unresolved clarifications
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -3,7 +3,7 @@
|
|||||||
**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
|
**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
|
||||||
**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`
|
**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`
|
||||||
|
|
||||||
**Note**: This template is filled in by the `/speckit-plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
|
**Note**: This template is filled in by the `/speckit-plan` command. See `.agents/skills/plan.md` for the execution workflow.
|
||||||
|
|
||||||
## Summary
|
## Summary
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-quizme
|
name: speckit-quizme
|
||||||
description: Challenge the specification with Socratic questioning to identify logical gaps, unhandled edge cases, and robustness issues.
|
description: Challenge the specification with Socratic questioning to identify logical gaps, unhandled edge cases, and robustness issues.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
handoffs:
|
handoffs:
|
||||||
- label: Clarify Spec Requirements
|
- label: Clarify Spec Requirements
|
||||||
agent: speckit-clarify
|
agent: speckit-clarify
|
||||||
@@ -65,3 +65,15 @@ Execution steps:
|
|||||||
- **Be a Skeptic**: Don't assume the happy path works.
|
- **Be a Skeptic**: Don't assume the happy path works.
|
||||||
- **Focus on "When" and "If"**: When high load, If network drops, When concurrent edits.
|
- **Focus on "When" and "If"**: When high load, If network drops, When concurrent edits.
|
||||||
- **Don't be annoying**: Focus on _critical_ flaws, not nitpicks.
|
- **Don't be annoying**: Focus on _critical_ flaws, not nitpicks.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-reviewer
|
name: speckit-reviewer
|
||||||
description: Perform code review with actionable feedback and suggestions.
|
description: Perform code review with actionable feedback and suggestions.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on: []
|
depends-on: []
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -142,3 +142,15 @@ Review code changes and provide structured feedback with severity levels.
|
|||||||
- **Be Balanced**: Mention what's good, not just what's wrong
|
- **Be Balanced**: Mention what's good, not just what's wrong
|
||||||
- **Prioritize**: Focus on real issues, not style nitpicks
|
- **Prioritize**: Focus on real issues, not style nitpicks
|
||||||
- **Be Educational**: Explain WHY something is an issue
|
- **Be Educational**: Explain WHY something is an issue
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-security-audit
|
name: speckit-security-audit
|
||||||
description: Perform a security-focused audit of the codebase against OWASP Top 10, CASL authorization, and LCBP3-DMS security requirements.
|
description: Perform a security-focused audit of the codebase against OWASP Top 10, CASL authorization, and LCBP3-DMS security requirements.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-checker
|
- speckit-checker
|
||||||
---
|
---
|
||||||
@@ -12,16 +12,16 @@ You are the **Antigravity Security Sentinel**. Your mission is to identify secur
|
|||||||
|
|
||||||
## Task
|
## Task
|
||||||
|
|
||||||
Perform a comprehensive security audit covering OWASP Top 10, CASL permission enforcement, file upload safety, and project-specific security rules defined in `specs/06-Decision-Records/ADR-016-security.md`.
|
Perform a comprehensive security audit covering OWASP Top 10, CASL permission enforcement, file upload safety, and project-specific security rules defined in `specs/06-Decision-Records/ADR-016-security-authentication.md`.
|
||||||
|
|
||||||
## Context Loading
|
## Context Loading
|
||||||
|
|
||||||
Before auditing, load the security context:
|
Before auditing, load the security context:
|
||||||
|
|
||||||
1. Read `specs/06-Decision-Records/ADR-016-security.md` for project security decisions
|
1. Read `specs/06-Decision-Records/ADR-016-security-authentication.md` for project security decisions
|
||||||
2. Read `specs/05-Engineering-Guidelines/05-02-backend-guidelines.md` for backend security patterns
|
2. Read `specs/05-Engineering-Guidelines/05-02-backend-guidelines.md` for backend security patterns
|
||||||
3. Read `specs/03-Data-and-Storage/lcbp3-v1.7.0-seed-permissions.sql` for CASL permission definitions
|
3. Read `specs/03-Data-and-Storage/lcbp3-v1.8.0-seed-permissions.sql` for CASL permission definitions
|
||||||
4. Read `GEMINI.md` for security rules (Section: Security & Integrity Rules)
|
4. Read `AGENTS.md` for security rules (Section: Security Rules Non-Negotiable + Security & Integrity Audit Protocol)
|
||||||
|
|
||||||
## Execution Steps
|
## Execution Steps
|
||||||
|
|
||||||
@@ -44,7 +44,7 @@ Scan the `backend/src/` directory for each OWASP category:
|
|||||||
|
|
||||||
### Phase 2: CASL Authorization Audit
|
### Phase 2: CASL Authorization Audit
|
||||||
|
|
||||||
1. **Load permission matrix** from `specs/03-Data-and-Storage/lcbp3-v1.7.0-seed-permissions.sql`
|
1. **Load permission matrix** from `specs/03-Data-and-Storage/lcbp3-v1.8.0-seed-permissions.sql`
|
||||||
2. **Scan all controllers** for `@UseGuards(CaslAbilityGuard)` coverage:
|
2. **Scan all controllers** for `@UseGuards(CaslAbilityGuard)` coverage:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
@@ -197,3 +197,15 @@ Generate a structured report:
|
|||||||
- **No False Confidence**: If a check is inconclusive, mark it as "⚠️ Needs Manual Review" rather than passing.
|
- **No False Confidence**: If a check is inconclusive, mark it as "⚠️ Needs Manual Review" rather than passing.
|
||||||
- **LCBP3-Specific**: Prioritize project-specific rules (idempotency, ClamAV, Redlock) over generic checks.
|
- **LCBP3-Specific**: Prioritize project-specific rules (idempotency, ClamAV, Redlock) over generic checks.
|
||||||
- **Frontend Too**: If scope includes frontend, also check for XSS in React components, unescaped user data, and exposed API keys.
|
- **Frontend Too**: If scope includes frontend, also check for XSS in React components, unescaped user data, and exposed API keys.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-specify
|
name: speckit-specify
|
||||||
description: Create or update the feature specification from a natural language feature description.
|
description: Create or update the feature specification from a natural language feature description.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
handoffs:
|
handoffs:
|
||||||
- label: Build Technical Plan
|
- label: Build Technical Plan
|
||||||
agent: speckit-plan
|
agent: speckit-plan
|
||||||
@@ -64,8 +64,8 @@ Given that feature description, do this:
|
|||||||
|
|
||||||
d. Run the script `../scripts/bash/create-new-feature.sh --json "{{args}}"` with the calculated number and short-name:
|
d. Run the script `../scripts/bash/create-new-feature.sh --json "{{args}}"` with the calculated number and short-name:
|
||||||
- Pass `--number N+1` and `--short-name "your-short-name"` along with the feature description
|
- Pass `--number N+1` and `--short-name "your-short-name"` along with the feature description
|
||||||
- Bash example: `.specify/scripts/bash/create-new-feature.sh --json "{{args}}" --json --number 5 --short-name "user-auth" "Add user authentication"`
|
- Bash example: `.agents/scripts/bash/create-new-feature.sh --json "{{args}}" --number 5 --short-name "user-auth" "Add user authentication"`
|
||||||
- PowerShell example: `.specify/scripts/bash/create-new-feature.sh --json "{{args}}" -Json -Number 5 -ShortName "user-auth" "Add user authentication"`
|
- PowerShell example: `.agents/scripts/powershell/create-new-feature.ps1 -Json -Args '{{args}}' -Number 5 -ShortName "user-auth" "Add user authentication"`
|
||||||
|
|
||||||
**IMPORTANT**:
|
**IMPORTANT**:
|
||||||
- Check all three sources (remote branches, local branches, specs directories) to find the highest number
|
- Check all three sources (remote branches, local branches, specs directories) to find the highest number
|
||||||
@@ -262,3 +262,15 @@ Success criteria must be:
|
|||||||
- "Database can handle 1000 TPS" (implementation detail, use user-facing metric)
|
- "Database can handle 1000 TPS" (implementation detail, use user-facing metric)
|
||||||
- "React components render efficiently" (framework-specific)
|
- "React components render efficiently" (framework-specific)
|
||||||
- "Redis cache hit rate above 80%" (technology-specific)
|
- "Redis cache hit rate above 80%" (technology-specific)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../\_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-status
|
name: speckit-status
|
||||||
description: Display a dashboard showing feature status, completion percentage, and blockers.
|
description: Display a dashboard showing feature status, completion percentage, and blockers.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on: []
|
depends-on: []
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -109,3 +109,15 @@ Generate a dashboard view of all features and their completion status.
|
|||||||
- **Be Visual**: Use progress bars and tables
|
- **Be Visual**: Use progress bars and tables
|
||||||
- **Be Actionable**: Every status should have a "next action"
|
- **Be Actionable**: Every status should have a "next action"
|
||||||
- **Be Fast**: Cache nothing, always recalculate
|
- **Be Fast**: Cache nothing, always recalculate
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-tasks
|
name: speckit-tasks
|
||||||
description: Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts.
|
description: Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-plan
|
- speckit-plan
|
||||||
handoffs:
|
handoffs:
|
||||||
@@ -145,3 +145,15 @@ Every task MUST strictly follow this format:
|
|||||||
- Within each story: Tests (if requested) → Models → Services → Endpoints → Integration
|
- Within each story: Tests (if requested) → Models → Services → Endpoints → Integration
|
||||||
- Each phase should be a complete, independently testable increment
|
- Each phase should be a complete, independently testable increment
|
||||||
- **Final Phase**: Polish & Cross-Cutting Concerns
|
- **Final Phase**: Polish & Cross-Cutting Concerns
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-taskstoissues
|
name: speckit-taskstoissues
|
||||||
description: Convert existing tasks into actionable, dependency-ordered issues for the feature based on available design artifacts.
|
description: Convert existing tasks into actionable, dependency-ordered issues for the feature based on available design artifacts.
|
||||||
version: 1.1.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-tasks
|
- speckit-tasks
|
||||||
tools: ['github/github-mcp-server/issue_write']
|
tools: ['github/github-mcp-server/issue_write']
|
||||||
@@ -204,3 +204,15 @@ Convert all tasks from `tasks.md` into well-structured issues on the appropriate
|
|||||||
- **Label Consistency**: Use a consistent label taxonomy across all issues
|
- **Label Consistency**: Use a consistent label taxonomy across all issues
|
||||||
- **Platform Safety**: Never create issues on repos that don't match the git remote
|
- **Platform Safety**: Never create issues on repos that don't match the git remote
|
||||||
- **Dry Run Support**: Always support `--dry-run` to preview before creating
|
- **Dry Run Support**: Always support `--dry-run` to preview before creating
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-tester
|
name: speckit-tester
|
||||||
description: Execute tests, measure coverage, and report results.
|
description: Execute tests, measure coverage, and report results.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on: []
|
depends-on: []
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -120,3 +120,15 @@ Detect the project's test framework, execute tests, and generate a comprehensive
|
|||||||
- **Preserve Output**: Keep full test output for debugging
|
- **Preserve Output**: Keep full test output for debugging
|
||||||
- **Be Helpful**: Suggest fixes for common failure patterns
|
- **Be Helpful**: Suggest fixes for common failure patterns
|
||||||
- **Respect Timeouts**: Set reasonable timeout (5 min default)
|
- **Respect Timeouts**: Set reasonable timeout (5 min default)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
---
|
---
|
||||||
name: speckit-validate
|
name: speckit-validate
|
||||||
description: Validate that implementation matches specification requirements.
|
description: Validate that implementation matches specification requirements.
|
||||||
version: 1.0.0
|
version: 1.8.9
|
||||||
depends-on:
|
depends-on:
|
||||||
- speckit-implement
|
- speckit-implement
|
||||||
---
|
---
|
||||||
@@ -92,3 +92,15 @@ Post-implementation validation that compares code against spec requirements.
|
|||||||
- **Be Fair**: Semantic matching, not just keyword matching
|
- **Be Fair**: Semantic matching, not just keyword matching
|
||||||
- **Be Actionable**: Every gap should have a clear fix recommendation
|
- **Be Actionable**: Every gap should have a clear fix recommendation
|
||||||
- **Don't Block on Style**: Focus on functional coverage, not code style
|
- **Don't Block on Style**: Focus on functional coverage, not code style
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## LCBP3-DMS Context (MUST LOAD)
|
||||||
|
|
||||||
|
Before executing, load **[../_LCBP3-CONTEXT.md](../_LCBP3-CONTEXT.md)** to get:
|
||||||
|
|
||||||
|
- Canonical rule sources (AGENTS.md, specs/06-Decision-Records/, specs/05-Engineering-Guidelines/)
|
||||||
|
- Tier 1 non-negotiables (ADR-019 UUID, ADR-009 schema, ADR-016 security, ADR-002 numbering, ADR-008 BullMQ, ADR-018/020 AI boundary, ADR-007 errors)
|
||||||
|
- Domain glossary (Correspondence / RFA / Transmittal / Circulation)
|
||||||
|
- Helper script real paths
|
||||||
|
- Commit checklist
|
||||||
@@ -10,27 +10,27 @@ This meta-workflow orchestrates the **complete development lifecycle**, from spe
|
|||||||
## Preparation Phase (Steps 1-5)
|
## Preparation Phase (Steps 1-5)
|
||||||
|
|
||||||
1. **Specify** (`/speckit.specify`):
|
1. **Specify** (`/speckit.specify`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.specify/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-specify/SKILL.md`
|
||||||
- Execute with user's feature description
|
- Execute with user's feature description
|
||||||
- Creates: `spec.md`
|
- Creates: `spec.md`
|
||||||
|
|
||||||
2. **Clarify** (`/speckit.clarify`):
|
2. **Clarify** (`/speckit.clarify`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.clarify/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-clarify/SKILL.md`
|
||||||
- Execute to resolve ambiguities
|
- Execute to resolve ambiguities
|
||||||
- Updates: `spec.md`
|
- Updates: `spec.md`
|
||||||
|
|
||||||
3. **Plan** (`/speckit.plan`):
|
3. **Plan** (`/speckit.plan`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.plan/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-plan/SKILL.md`
|
||||||
- Execute to create technical design
|
- Execute to create technical design
|
||||||
- Creates: `plan.md`
|
- Creates: `plan.md`
|
||||||
|
|
||||||
4. **Tasks** (`/speckit.tasks`):
|
4. **Tasks** (`/speckit.tasks`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.tasks/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-tasks/SKILL.md`
|
||||||
- Execute to generate task breakdown
|
- Execute to generate task breakdown
|
||||||
- Creates: `tasks.md`
|
- Creates: `tasks.md`
|
||||||
|
|
||||||
5. **Analyze** (`/speckit.analyze`):
|
5. **Analyze** (`/speckit.analyze`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.analyze/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-analyze/SKILL.md`
|
||||||
- Execute to validate consistency across spec, plan, and tasks
|
- Execute to validate consistency across spec, plan, and tasks
|
||||||
- Output: Analysis report
|
- Output: Analysis report
|
||||||
- **Gate**: If critical issues found, stop and fix before proceeding
|
- **Gate**: If critical issues found, stop and fix before proceeding
|
||||||
@@ -38,29 +38,29 @@ This meta-workflow orchestrates the **complete development lifecycle**, from spe
|
|||||||
## Implementation Phase (Steps 6-7)
|
## Implementation Phase (Steps 6-7)
|
||||||
|
|
||||||
6. **Implement** (`/speckit.implement`):
|
6. **Implement** (`/speckit.implement`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.implement/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-implement/SKILL.md`
|
||||||
- Execute all tasks from `tasks.md` with anti-regression protocols
|
- Execute all tasks from `tasks.md` with anti-regression protocols
|
||||||
- Output: Working implementation
|
- Output: Working implementation
|
||||||
|
|
||||||
7. **Check** (`/speckit.checker`):
|
7. **Check** (`/speckit.checker`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.checker/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-checker/SKILL.md`
|
||||||
- Run static analysis (linters, type checkers, security scanners)
|
- Run static analysis (linters, type checkers, security scanners)
|
||||||
- Output: Checker report
|
- Output: Checker report
|
||||||
|
|
||||||
## Verification Phase (Steps 8-10)
|
## Verification Phase (Steps 8-10)
|
||||||
|
|
||||||
8. **Test** (`/speckit.tester`):
|
8. **Test** (`/speckit.tester`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.tester/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-tester/SKILL.md`
|
||||||
- Run tests with coverage
|
- Run tests with coverage
|
||||||
- Output: Test + coverage report
|
- Output: Test + coverage report
|
||||||
|
|
||||||
9. **Review** (`/speckit.reviewer`):
|
9. **Review** (`/speckit.reviewer`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.reviewer/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-reviewer/SKILL.md`
|
||||||
- Perform code review
|
- Perform code review
|
||||||
- Output: Review report with findings
|
- Output: Review report with findings
|
||||||
|
|
||||||
10. **Validate** (`/speckit.validate`):
|
10. **Validate** (`/speckit.validate`):
|
||||||
- Use the `view_file` tool to read: `.agents/skills/speckit.validate/SKILL.md`
|
- Use the `view_file` tool to read: `.agents/skills/speckit-validate/SKILL.md`
|
||||||
- Verify implementation matches spec requirements
|
- Verify implementation matches spec requirements
|
||||||
- Output: Validation report (pass/fail)
|
- Output: Validation report (pass/fail)
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Create or update the project constitution from interactive or provi
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.constitution/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-constitution/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ description: Create or update the feature specification from a natural language
|
|||||||
- This is typically the starting point of a new feature.
|
- This is typically the starting point of a new feature.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.specify/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-specify/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Identify underspecified areas in the current feature spec by asking
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.clarify/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-clarify/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Execute the implementation planning workflow using the plan templat
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.plan/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-plan/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Generate an actionable, dependency-ordered tasks.md for the feature
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.tasks/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-tasks/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ description: Perform a non-destructive cross-artifact consistency and quality an
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.analyze/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-analyze/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Execute the implementation plan by processing and executing all tas
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.implement/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-implement/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ description: Run static analysis tools and aggregate results.
|
|||||||
- The user may specify paths to check or run on entire project.
|
- The user may specify paths to check or run on entire project.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.checker/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-checker/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ description: Execute tests, measure coverage, and report results.
|
|||||||
- The user may specify test paths, options, or just run all tests.
|
- The user may specify test paths, options, or just run all tests.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.tester/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-tester/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Perform code review with actionable feedback and suggestions.
|
|||||||
- The user may specify files to review, "staged" for git staged changes, or "branch" for branch diff.
|
- The user may specify files to review, "staged" for git staged changes, or "branch" for branch diff.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.reviewer/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-reviewer/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Validate that implementation matches specification requirements.
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.validate/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-validate/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -0,0 +1,22 @@
|
|||||||
|
---
|
||||||
|
auto_execution_mode: 0
|
||||||
|
description: Perform a security-focused audit of the codebase against OWASP Top 10, CASL authorization, and LCBP3-DMS security requirements.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Workflow: speckit.security-audit
|
||||||
|
|
||||||
|
1. **Context Analysis**:
|
||||||
|
- The user may pass a scope hint: `backend`, `frontend`, `both`, or specific module paths (defaults to `both`).
|
||||||
|
|
||||||
|
2. **Load Skill**:
|
||||||
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-security-audit/SKILL.md`
|
||||||
|
- Also load `.agents/skills/_LCBP3-CONTEXT.md` for project-specific rules.
|
||||||
|
|
||||||
|
3. **Execute**:
|
||||||
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
- This is READ-ONLY — never modify code during the audit.
|
||||||
|
- Output a structured report with Critical / High / Medium / Low severity.
|
||||||
|
|
||||||
|
4. **On Error**:
|
||||||
|
- If scope unclear: Default to `both` (backend + frontend)
|
||||||
|
- If `specs/06-Decision-Records/ADR-016-security-authentication.md` missing: Warn and proceed with OWASP Top 10 + CASL checks only
|
||||||
@@ -9,20 +9,20 @@ This workflow orchestrates the sequential execution of the Speckit preparation p
|
|||||||
|
|
||||||
1. **Step 1: Specify (Skill 02)**
|
1. **Step 1: Specify (Skill 02)**
|
||||||
- Goal: Create or update the `spec.md` based on user input.
|
- Goal: Create or update the `spec.md` based on user input.
|
||||||
- Action: Read and execute `.agents/skills/speckit.specify/SKILL.md`.
|
- Action: Read and execute `.agents/skills/speckit-specify/SKILL.md`.
|
||||||
|
|
||||||
2. **Step 2: Clarify (Skill 03)**
|
2. **Step 2: Clarify (Skill 03)**
|
||||||
- Goal: Refine the `spec.md` by identifying and resolving ambiguities.
|
- Goal: Refine the `spec.md` by identifying and resolving ambiguities.
|
||||||
- Action: Read and execute `.agents/skills/speckit.clarify/SKILL.md`.
|
- Action: Read and execute `.agents/skills/speckit-clarify/SKILL.md`.
|
||||||
|
|
||||||
3. **Step 3: Plan (Skill 04)**
|
3. **Step 3: Plan (Skill 04)**
|
||||||
- Goal: Generate `plan.md` from the finalized spec.
|
- Goal: Generate `plan.md` from the finalized spec.
|
||||||
- Action: Read and execute `.agents/skills/speckit.plan/SKILL.md`.
|
- Action: Read and execute `.agents/skills/speckit-plan/SKILL.md`.
|
||||||
|
|
||||||
4. **Step 4: Tasks (Skill 05)**
|
4. **Step 4: Tasks (Skill 05)**
|
||||||
- Goal: Generate actionable `tasks.md` from the plan.
|
- Goal: Generate actionable `tasks.md` from the plan.
|
||||||
- Action: Read and execute `.agents/skills/speckit.tasks/SKILL.md`.
|
- Action: Read and execute `.agents/skills/speckit-tasks/SKILL.md`.
|
||||||
|
|
||||||
5. **Step 5: Analyze (Skill 06)**
|
5. **Step 5: Analyze (Skill 06)**
|
||||||
- Goal: Validate consistency across all design artifacts (spec, plan, tasks).
|
- Goal: Validate consistency across all design artifacts (spec, plan, tasks).
|
||||||
- Action: Read and execute `.agents/skills/speckit.analyze/SKILL.md`.
|
- Action: Read and execute `.agents/skills/speckit-analyze/SKILL.md`.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Generate a custom checklist for the current feature based on user r
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.checklist/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-checklist/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Compare two versions of a spec or plan to highlight changes.
|
|||||||
- The user has provided an input prompt (optional file paths or version references).
|
- The user has provided an input prompt (optional file paths or version references).
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.diff/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-diff/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ description: Migrate existing projects into the speckit structure by generating
|
|||||||
- The user has provided an input prompt (path to analyze, feature name).
|
- The user has provided an input prompt (path to analyze, feature name).
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.migrate/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-migrate/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ description: Challenge the specification with Socratic questioning to identify l
|
|||||||
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
- The user has provided an input prompt. Treat this as the primary input for the skill.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.quizme/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-quizme/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ description: Display a dashboard showing feature status, completion percentage,
|
|||||||
- The user may optionally specify a feature to focus on.
|
- The user may optionally specify a feature to focus on.
|
||||||
|
|
||||||
2. **Load Skill**:
|
2. **Load Skill**:
|
||||||
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit.status/SKILL.md`
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-status/SKILL.md`
|
||||||
|
|
||||||
3. **Execute**:
|
3. **Execute**:
|
||||||
- Follow the instructions in the `SKILL.md` exactly.
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
|||||||
@@ -0,0 +1,23 @@
|
|||||||
|
---
|
||||||
|
auto_execution_mode: 0
|
||||||
|
description: Convert existing tasks into actionable, dependency-ordered issues on Gitea for the current feature.
|
||||||
|
---
|
||||||
|
|
||||||
|
# Workflow: speckit.taskstoissues
|
||||||
|
|
||||||
|
1. **Context Analysis**:
|
||||||
|
- The user may pass filters (e.g., phase, priority). Default: convert all pending tasks.
|
||||||
|
|
||||||
|
2. **Load Skill**:
|
||||||
|
- Use the `view_file` tool to read the skill file at: `.agents/skills/speckit-taskstoissues/SKILL.md`
|
||||||
|
- Also load `.agents/skills/_LCBP3-CONTEXT.md` for project conventions (labels, commit format).
|
||||||
|
|
||||||
|
3. **Execute**:
|
||||||
|
- Follow the instructions in the `SKILL.md` exactly.
|
||||||
|
- Use Gitea API (not GitHub) — target `git.np-dms.work/np-dms/lcbp3`.
|
||||||
|
- Apply LCBP3 labels: `spec`, `adr`, `security`, `ux`, `backend`, `frontend`, `schema`, etc.
|
||||||
|
- Use commit-format-compatible issue titles (per `specs/05-Engineering-Guidelines/05-05-git-conventions.md`).
|
||||||
|
|
||||||
|
4. **On Error**:
|
||||||
|
- If `tasks.md` missing: Run `/05-speckit.tasks` first
|
||||||
|
- If Gitea credentials missing: Report to user and provide manual issue-creation template
|
||||||
@@ -1,8 +1,9 @@
|
|||||||
# NAP-DMS Project Context & Rules
|
# NAP-DMS Project Context & Rules
|
||||||
|
|
||||||
- For: Windsurf Cascade (and compatible: Codex CLI, opencode, Amp, Antigravity, AGENTS.md tools)
|
- For: Windsurf Cascade (and compatible: Codex CLI, opencode, Amp, Antigravity, AGENTS.md tools)
|
||||||
- Version: 1.8.7 | Last synced from repo: 2026-04-14
|
- Version: 1.8.9 | Last synced from repo: 2026-04-22
|
||||||
- Repo: [https://git.np-dms.work/np-dms/lcbp3](https://git.np-dms.work/np-dms/lcbp3)
|
- Repo: [https://git.np-dms.work/np-dms/lcbp3](https://git.np-dms.work/np-dms/lcbp3)
|
||||||
|
- Skill pack: `.agents/skills/` (v1.8.9, 20 skills) — see [`skills/README.md`](./.agents/skills/README.md) + [`skills/_LCBP3-CONTEXT.md`](./.agents/skills/_LCBP3-CONTEXT.md)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -380,26 +381,30 @@ This file is a **quick reference**. For detailed information:
|
|||||||
|
|
||||||
- **Architecture:** `specs/02-architecture/`
|
- **Architecture:** `specs/02-architecture/`
|
||||||
- **Requirements:** `specs/01-requirements/`
|
- **Requirements:** `specs/01-requirements/`
|
||||||
- **Data & Storage:** `specs/03-Data-and-Storage/`
|
- **Data & Storage:** `specs/03-Data-and-Storage/` (canonical schema + `deltas/` incremental SQL per ADR-009)
|
||||||
- **Engineering Guidelines:** `specs/05-Engineering-Guidelines/`
|
- **Engineering Guidelines:** `specs/05-Engineering-Guidelines/`
|
||||||
- **Decision Records:** `specs/06-Decision-Records/`
|
- **Decision Records:** `specs/06-Decision-Records/`
|
||||||
- **Infrastructure:** `specs/04-Infrastructure-OPS/`
|
- **Infrastructure:** `specs/04-Infrastructure-OPS/`
|
||||||
|
- **Agent Skill Pack:** `.agents/skills/` (NestJS/Next.js rules + 18 Speckit workflow skills)
|
||||||
|
- **Helper Scripts:** `.agents/scripts/{bash,powershell}/` (audit, validate, prerequisites, setup-plan)
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🔄 Change Log
|
## 🔄 Change Log
|
||||||
|
|
||||||
| Version | Date | Changes | Updated By |
|
| Version | Date | Changes | Updated By |
|
||||||
| ------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------- | -------------- |
|
| ------- | ---------- | ------------------------------------------------------------------------------------------------------------------------------------------------- | -------------- |
|
||||||
| 1.8.7 | 2026-04-14 | + ADR-021 Workflow Context integration, + ADR-021 Integration Work tier, + Transmittal/Circulation context triggers, updated ADR-020 status | Windsurf AI |
|
| 1.8.9 | 2026-04-22 | `.agents/skills/` LCBP3-native rebuild (20 skills @ v1.8.9) + `_LCBP3-CONTEXT.md` appendix + `specs/03-Data-and-Storage/deltas/` + AGENTS.md sync | Windsurf AI |
|
||||||
| 1.8.6 | 2026-04-10 | + DMS Workflow Engine Protocol, + Security & Integrity Audit Protocol, + 2 Context-Aware Triggers, ADR Status column, Forbidden Why column | Human Dev |
|
| 1.8.8 | 2026-04-14 | Workflow attachments (ADR-021) + step-attachment envelope fields | Windsurf AI |
|
||||||
| 1.8.5 | 2026-04-04 | Added ADR-007 error handling, ADR-020 AI integration, updated security rules | Windsurf AI |
|
| 1.8.7 | 2026-04-14 | + ADR-021 Workflow Context integration, + ADR-021 Integration Work tier, + Transmittal/Circulation context triggers, updated ADR-020 status | Windsurf AI |
|
||||||
| 1.8.4 | 2026-03-24 | Phase 5.4→✅ DONE, Tailwind 3.4.3, ADR count(16), MariaDB UUID note | Windsurf AI |
|
| 1.8.6 | 2026-04-10 | + DMS Workflow Engine Protocol, + Security & Integrity Audit Protocol, + 2 Context-Aware Triggers, ADR Status column, Forbidden Why column | Human Dev |
|
||||||
| 1.8.3 | 2026-03-21 | + Rule Enforcement Tiers (🔴🟡🟢), + Tiered Development Flow | Human Dev + AI |
|
| 1.8.5 | 2026-04-04 | Added ADR-007 error handling, ADR-020 AI integration, updated security rules | Windsurf AI |
|
||||||
| 1.8.2 | 2026-03-21 | + Context Triggers, + Code Snippets, + Error Handling, + i18n | Human Dev + AI |
|
| 1.8.4 | 2026-03-24 | Phase 5.4→✅ DONE, Tailwind 3.4.3, ADR count(16), MariaDB UUID note | Windsurf AI |
|
||||||
| 1.8.1 | 2026-03-21 | + ADR-019 UUID patterns, + Phase 5.4 pending files | Claude Sonnet |
|
| 1.8.3 | 2026-03-21 | + Rule Enforcement Tiers (🔴🟡🟢), + Tiered Development Flow | Human Dev + AI |
|
||||||
| 1.8.0 | 2026-03-19 | + Security overrides, + UAT criteria reference | Human Dev |
|
| 1.8.2 | 2026-03-21 | + Context Triggers, + Code Snippets, + Error Handling, + i18n | Human Dev + AI |
|
||||||
| 1.7.2 | 2026-03-15 | + AI Boundary rules (ADR-018) | Gemini Pro |
|
| 1.8.1 | 2026-03-21 | + ADR-019 UUID patterns, + Phase 5.4 pending files | Claude Sonnet |
|
||||||
|
| 1.8.0 | 2026-03-19 | + Security overrides, + UAT criteria reference | Human Dev |
|
||||||
|
| 1.7.2 | 2026-03-15 | + AI Boundary rules (ADR-018) | Gemini Pro |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
+46
-1
@@ -58,7 +58,8 @@ specs/
|
|||||||
│ ├── lcbp3-v1.8.0-seed-basic.sql # Master Data Seed
|
│ ├── lcbp3-v1.8.0-seed-basic.sql # Master Data Seed
|
||||||
│ ├── lcbp3-v1.8.0-seed-permissions.sql # RBAC Permissions Seed
|
│ ├── lcbp3-v1.8.0-seed-permissions.sql # RBAC Permissions Seed
|
||||||
│ ├── 03-01-data-dictionary.md
|
│ ├── 03-01-data-dictionary.md
|
||||||
│ └── 03-06-migration-business-scope.md # Gap 7: Migration Scope [★ NEW]
|
│ ├── 03-06-migration-business-scope.md # Gap 7: Migration Scope [★ NEW]
|
||||||
|
│ └── deltas/ # Incremental SQL (ADR-009) [★ v1.8.9]
|
||||||
│
|
│
|
||||||
├── 04-Infrastructure-OPS/ # Deployment & Operations (9 docs)
|
├── 04-Infrastructure-OPS/ # Deployment & Operations (9 docs)
|
||||||
│ ├── README.md
|
│ ├── README.md
|
||||||
@@ -713,6 +714,50 @@ Create `.markdownlint.json`:
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## 🤖 AI-Assisted Contributions
|
||||||
|
|
||||||
|
โปรเจกต์นี้รองรับ AI agents (Windsurf Cascade, Codex CLI, opencode, Amp, Antigravity) ในการเขียน / review / refactor โค้ด — ผ่านคู่มือกลางคือ [`AGENTS.md`](./AGENTS.md) และชุดทักษะใน [`.agents/skills/`](./.agents/skills/)
|
||||||
|
|
||||||
|
### Canonical Rule Sources (อ่านตามลำดับนี้)
|
||||||
|
|
||||||
|
1. **[`AGENTS.md`](./AGENTS.md)** — quick-reference rules + change log (supersedes legacy `GEMINI.md`)
|
||||||
|
2. **[`.agents/skills/_LCBP3-CONTEXT.md`](./.agents/skills/_LCBP3-CONTEXT.md)** — shared context loaded by every speckit-\* skill
|
||||||
|
3. **[`.agents/skills/README.md`](./.agents/skills/README.md)** — skill-pack layout + Windsurf invocation guide
|
||||||
|
4. `specs/06-Decision-Records/` (โดยเฉพาะ ADR-019 — UUID **March 2026 pattern**)
|
||||||
|
5. `specs/05-Engineering-Guidelines/` (backend / frontend / testing / i18n / git conventions)
|
||||||
|
|
||||||
|
### Invocation (Windsurf)
|
||||||
|
|
||||||
|
ใช้ slash commands ด้านล่าง — `.windsurf/workflows/*.md` ห่อหุ้ม [`.agents/skills/speckit-*`](./.agents/skills/) ไว้ให้:
|
||||||
|
|
||||||
|
- `/02-speckit.specify` → spec.md
|
||||||
|
- `/04-speckit.plan` → plan.md + data-model.md + contracts/
|
||||||
|
- `/05-speckit.tasks` → tasks.md
|
||||||
|
- `/07-speckit.implement` → execute tasks (with Ironclad Anti-Regression Protocols)
|
||||||
|
- `/10-speckit.reviewer` → code review (Tier 1/2/3 classification)
|
||||||
|
- `/12-speckit.security-audit` → OWASP + CASL + LCBP3-specific
|
||||||
|
|
||||||
|
### Health Checks
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Version + frontmatter consistency
|
||||||
|
bash ./.agents/scripts/bash/validate-versions.sh
|
||||||
|
pwsh ./.agents/scripts/powershell/validate-versions.ps1
|
||||||
|
|
||||||
|
# Full skill audit (20 skills)
|
||||||
|
bash ./.agents/scripts/bash/audit-skills.sh
|
||||||
|
pwsh ./.agents/scripts/powershell/audit-skills.ps1
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🔴 Tier 1 Non-Negotiables (AI must enforce)
|
||||||
|
|
||||||
|
- **ADR-019 UUID** — `publicId` exposed directly; ห้าม `parseInt`/`Number`/`+` บน UUID; ห้าม `id ?? ''` fallback; ห้ามใช้ `@Expose({ name: 'id' })` rename
|
||||||
|
- **ADR-009 Schema** — แก้ `lcbp3-v1.8.0-schema-02-tables.sql` โดยตรง + เพิ่ม delta ที่ `specs/03-Data-and-Storage/deltas/`; ห้าม TypeORM migrations
|
||||||
|
- **ADR-016 Security** — CASL + `Idempotency-Key` + ClamAV two-phase upload
|
||||||
|
- **ADR-018/020 AI Boundary** — Ollama on Admin Desktop only; human-in-the-loop validation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## 🎯 Quality Standards
|
## 🎯 Quality Standards
|
||||||
|
|
||||||
### Definition of Done (DoD) สำหรับ Spec Changes
|
### Definition of Done (DoD) สำหรับ Spec Changes
|
||||||
|
|||||||
@@ -325,9 +325,9 @@ lcbp3-dms/
|
|||||||
├── .vscode/ # VS Code settings and extensions
|
├── .vscode/ # VS Code settings and extensions
|
||||||
├── .husky/ # Git hooks
|
├── .husky/ # Git hooks
|
||||||
│
|
│
|
||||||
├── AGENTS.md # AI agent rules & project context
|
├── AGENTS.md # AI agent rules & project context (v1.8.9) [★ primary]
|
||||||
├── GEMINI.md # AI coding guidelines
|
├── GEMINI.md # AI coding guidelines [legacy — kept for backward compat]
|
||||||
├── CONTRIBUTING.md # Contribution guidelines
|
├── CONTRIBUTING.md # Contribution guidelines (+ AI-Assisted section)
|
||||||
├── CHANGELOG.md # Version history
|
├── CHANGELOG.md # Version history
|
||||||
├── README.md # This file
|
├── README.md # This file
|
||||||
├── package.json # Root package.json (monorepo)
|
├── package.json # Root package.json (monorepo)
|
||||||
@@ -739,6 +739,22 @@ docker-compose -f docker-compose.yml up -d
|
|||||||
- Development Process
|
- Development Process
|
||||||
- Pull Request Process
|
- Pull Request Process
|
||||||
- Coding Standards
|
- Coding Standards
|
||||||
|
- **AI-Assisted Contributions** (AGENTS.md + `.agents/skills/` skill pack + Windsurf slash commands)
|
||||||
|
|
||||||
|
### 🤖 For AI Agents (Windsurf Cascade, Codex CLI, opencode, Amp, Antigravity)
|
||||||
|
|
||||||
|
ไฟล์กลางสำหรับ AI assistants:
|
||||||
|
|
||||||
|
| Priority | File | Purpose |
|
||||||
|
| -------- | ------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------- |
|
||||||
|
| 1 | [`AGENTS.md`](./AGENTS.md) | Quick-reference rules (Tier 1/2/3 enforcement, ADR-019 March 2026 pattern, forbidden actions) |
|
||||||
|
| 2 | [`.agents/skills/_LCBP3-CONTEXT.md`](./.agents/skills/_LCBP3-CONTEXT.md) | Shared context appendix injected into every speckit-\* skill |
|
||||||
|
| 3 | [`.agents/skills/README.md`](./.agents/skills/README.md) | Skill-pack layout + slash-command invocation guide |
|
||||||
|
| 4 | `specs/06-Decision-Records/` | 22 ADRs (architectural decisions) |
|
||||||
|
|
||||||
|
**Slash commands:** `/02-speckit.specify` → `/04-speckit.plan` → `/05-speckit.tasks` → `/07-speckit.implement` → `/10-speckit.reviewer` → `/12-speckit.security-audit`
|
||||||
|
|
||||||
|
**Health checks:** `bash ./.agents/scripts/bash/audit-skills.sh` or `pwsh ./.agents/scripts/powershell/audit-skills.ps1`
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -769,7 +785,18 @@ This project is **Internal Use Only** - ลิขสิทธิ์เป็น
|
|||||||
|
|
||||||
## 🗺️ Roadmap
|
## 🗺️ Roadmap
|
||||||
|
|
||||||
### ✅ Version 1.8.9 (Apr 2026) — Infrastructure Hardening
|
### ✅ Version 1.8.9 (Apr 2026) — Infrastructure Hardening + Agent Skill Pack Rebuild
|
||||||
|
|
||||||
|
**Agent skill pack rebuilt (`.agents/skills/` @ v1.8.9) — 2026-04-22:**
|
||||||
|
|
||||||
|
- ✅ 20 skills standardized (2 best-practices + 18 speckit-\*) — shared `_LCBP3-CONTEXT.md` appendix
|
||||||
|
- ✅ ADR-019 drift removed: `publicId` exposed directly (no `@Expose({ name: 'id' })` rename); `id ?? ''` fallback eliminated
|
||||||
|
- ✅ Dead references cleaned: `GEMINI.md` → `AGENTS.md`; `.specify/memory/` → `AGENTS.md`; `v1.7.0` → `v1.8.0` schema
|
||||||
|
- ✅ New rules: workflow-engine (ADR-001/002/021), file-two-phase-upload (ADR-016), ai-boundary (ADR-018/020), no-typeorm-migrations (ADR-009), i18n, two-phase-upload (frontend)
|
||||||
|
- ✅ `.windsurf/workflows/` path fixes (18 files) + 2 new wrappers (`12-speckit.security-audit`, `util-speckit.taskstoissues`)
|
||||||
|
- ✅ `specs/03-Data-and-Storage/deltas/` directory bootstrapped (ADR-009 incremental SQL)
|
||||||
|
- ✅ Regenerated `nestjs-best-practices/AGENTS.md` (188KB, 45 rules × 11 categories incl. LCBP3 project-specific)
|
||||||
|
- ✅ Helper scripts fixed (bash + pwsh): BASE_DIR, CRLF, color enum, version extraction
|
||||||
|
|
||||||
**Docker Compose stacks fully hardened — 27 findings across 4 phases:**
|
**Docker Compose stacks fully hardened — 27 findings across 4 phases:**
|
||||||
|
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import { UserModule } from '../../modules/user/user.module';
|
|||||||
TypeOrmModule.forFeature([Attachment]),
|
TypeOrmModule.forFeature([Attachment]),
|
||||||
ScheduleModule.forRoot(), // ✅ เปิดใช้งาน Cron Job],
|
ScheduleModule.forRoot(), // ✅ เปิดใช้งาน Cron Job],
|
||||||
UserModule,
|
UserModule,
|
||||||
BullModule.registerQueue({ name: 'rag:ocr' }),
|
BullModule.registerQueue({ name: 'rag-ocr' }),
|
||||||
],
|
],
|
||||||
controllers: [FileStorageController],
|
controllers: [FileStorageController],
|
||||||
providers: [
|
providers: [
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ export class FileStorageService {
|
|||||||
@InjectRepository(Attachment)
|
@InjectRepository(Attachment)
|
||||||
private attachmentRepository: Repository<Attachment>,
|
private attachmentRepository: Repository<Attachment>,
|
||||||
private configService: ConfigService,
|
private configService: ConfigService,
|
||||||
@Optional() @InjectQueue('rag:ocr') private readonly ragOcrQueue?: Queue
|
@Optional() @InjectQueue('rag-ocr') private readonly ragOcrQueue?: Queue
|
||||||
) {
|
) {
|
||||||
// ใช้ env vars จาก docker-compose สำหรับ Production
|
// ใช้ env vars จาก docker-compose สำหรับ Production
|
||||||
// ถ้าไม่ได้กำหนดจะ fallback เป็น ./uploads/temp และ ./uploads/permanent
|
// ถ้าไม่ได้กำหนดจะ fallback เป็น ./uploads/temp และ ./uploads/permanent
|
||||||
@@ -180,7 +180,7 @@ export class FileStorageService {
|
|||||||
)
|
)
|
||||||
.catch((err: unknown) => {
|
.catch((err: unknown) => {
|
||||||
this.logger.error(
|
this.logger.error(
|
||||||
`Failed to enqueue rag:ocr for ${saved.publicId}`,
|
`Failed to enqueue rag-ocr for ${saved.publicId}`,
|
||||||
err instanceof Error ? err.stack : String(err)
|
err instanceof Error ? err.stack : String(err)
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import { Test, TestingModule } from '@nestjs/testing';
|
|||||||
|
|
||||||
import { IngestionService } from '../ingestion.service';
|
import { IngestionService } from '../ingestion.service';
|
||||||
|
|
||||||
const QUEUE_TOKEN = 'BullQueue_rag:ocr';
|
const QUEUE_TOKEN = 'BullQueue_rag-ocr';
|
||||||
|
|
||||||
const mockOcrQueue = {
|
const mockOcrQueue = {
|
||||||
getJob: jest.fn(),
|
getJob: jest.fn(),
|
||||||
@@ -35,7 +35,7 @@ describe('IngestionService', () => {
|
|||||||
jest.clearAllMocks();
|
jest.clearAllMocks();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should enqueue rag:ocr job with attachmentPublicId as jobId', async () => {
|
it('should enqueue rag-ocr job with attachmentPublicId as jobId', async () => {
|
||||||
mockOcrQueue.getJob.mockResolvedValue(null);
|
mockOcrQueue.getJob.mockResolvedValue(null);
|
||||||
mockOcrQueue.add.mockResolvedValue({ id: baseJobData.attachmentPublicId });
|
mockOcrQueue.add.mockResolvedValue({ id: baseJobData.attachmentPublicId });
|
||||||
|
|
||||||
|
|||||||
@@ -17,10 +17,10 @@ export class DocumentChunk {
|
|||||||
@Column({ length: 20, name: 'doc_type' })
|
@Column({ length: 20, name: 'doc_type' })
|
||||||
docType!: string;
|
docType!: string;
|
||||||
|
|
||||||
@Column({ length: 100, name: 'doc_number', nullable: true })
|
@Column({ type: 'varchar', length: 100, name: 'doc_number', nullable: true })
|
||||||
docNumber!: string | null;
|
docNumber!: string | null;
|
||||||
|
|
||||||
@Column({ length: 20, nullable: true })
|
@Column({ type: 'varchar', length: 20, nullable: true })
|
||||||
revision!: string | null;
|
revision!: string | null;
|
||||||
|
|
||||||
@Column({ length: 50, name: 'project_code' })
|
@Column({ length: 50, name: 'project_code' })
|
||||||
@@ -36,7 +36,7 @@ export class DocumentChunk {
|
|||||||
})
|
})
|
||||||
classification!: 'PUBLIC' | 'INTERNAL' | 'CONFIDENTIAL';
|
classification!: 'PUBLIC' | 'INTERNAL' | 'CONFIDENTIAL';
|
||||||
|
|
||||||
@Column({ length: 20, nullable: true })
|
@Column({ type: 'varchar', length: 20, nullable: true })
|
||||||
version!: string | null;
|
version!: string | null;
|
||||||
|
|
||||||
@Column({ length: 100, name: 'embedding_model', default: 'nomic-embed-text' })
|
@Column({ length: 100, name: 'embedding_model', default: 'nomic-embed-text' })
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ import { OcrJobData } from './processors/ocr.processor';
|
|||||||
export class IngestionService {
|
export class IngestionService {
|
||||||
private readonly logger = new Logger(IngestionService.name);
|
private readonly logger = new Logger(IngestionService.name);
|
||||||
|
|
||||||
constructor(@InjectQueue('rag:ocr') private readonly ocrQueue: Queue) {}
|
constructor(@InjectQueue('rag-ocr') private readonly ocrQueue: Queue) {}
|
||||||
|
|
||||||
async enqueue(data: OcrJobData): Promise<void> {
|
async enqueue(data: OcrJobData): Promise<void> {
|
||||||
const jobId = data.attachmentPublicId;
|
const jobId = data.attachmentPublicId;
|
||||||
@@ -18,13 +18,13 @@ export class IngestionService {
|
|||||||
const state = await existing.getState();
|
const state = await existing.getState();
|
||||||
if (state === 'active' || state === 'waiting' || state === 'delayed') {
|
if (state === 'active' || state === 'waiting' || state === 'delayed') {
|
||||||
this.logger.log(
|
this.logger.log(
|
||||||
`rag:ocr job already queued for ${jobId} (state: ${state})`
|
`rag-ocr job already queued for ${jobId} (state: ${state})`
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await this.ocrQueue.add('ocr', data, { jobId });
|
await this.ocrQueue.add('ocr', data, { jobId });
|
||||||
this.logger.log(`Enqueued rag:ocr for attachment ${jobId}`);
|
this.logger.log(`Enqueued rag-ocr for attachment ${jobId}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ import { EmbeddingJobData } from './thai-preprocess.processor';
|
|||||||
const CHUNK_SIZE = 512;
|
const CHUNK_SIZE = 512;
|
||||||
const CHUNK_OVERLAP = 50;
|
const CHUNK_OVERLAP = 50;
|
||||||
|
|
||||||
@Processor('rag:embedding')
|
@Processor('rag-embedding')
|
||||||
export class EmbeddingProcessor extends WorkerHost {
|
export class EmbeddingProcessor extends WorkerHost {
|
||||||
private readonly logger = new Logger(EmbeddingProcessor.name);
|
private readonly logger = new Logger(EmbeddingProcessor.name);
|
||||||
|
|
||||||
|
|||||||
@@ -20,12 +20,12 @@ export interface OcrJobData {
|
|||||||
classification: 'PUBLIC' | 'INTERNAL' | 'CONFIDENTIAL';
|
classification: 'PUBLIC' | 'INTERNAL' | 'CONFIDENTIAL';
|
||||||
}
|
}
|
||||||
|
|
||||||
@Processor('rag:ocr')
|
@Processor('rag-ocr')
|
||||||
export class OcrProcessor extends WorkerHost {
|
export class OcrProcessor extends WorkerHost {
|
||||||
private readonly logger = new Logger(OcrProcessor.name);
|
private readonly logger = new Logger(OcrProcessor.name);
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
@InjectQueue('rag:thai-preprocess') private readonly thaiQueue: Queue,
|
@InjectQueue('rag-thai-preprocess') private readonly thaiQueue: Queue,
|
||||||
@InjectRepository(DocumentChunk)
|
@InjectRepository(DocumentChunk)
|
||||||
private readonly chunkRepo: Repository<DocumentChunk>
|
private readonly chunkRepo: Repository<DocumentChunk>
|
||||||
) {
|
) {
|
||||||
@@ -40,7 +40,7 @@ export class OcrProcessor extends WorkerHost {
|
|||||||
});
|
});
|
||||||
if (existing > 0) {
|
if (existing > 0) {
|
||||||
this.logger.log(
|
this.logger.log(
|
||||||
`rag:ocr job already indexed for ${attachmentPublicId}, skipping`
|
`rag-ocr job already indexed for ${attachmentPublicId}, skipping`
|
||||||
);
|
);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,14 +14,14 @@ export interface EmbeddingJobData extends ThaiPreprocessJobData {
|
|||||||
normalizedText: string;
|
normalizedText: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Processor('rag:thai-preprocess')
|
@Processor('rag-thai-preprocess')
|
||||||
export class ThaiPreprocessProcessor extends WorkerHost {
|
export class ThaiPreprocessProcessor extends WorkerHost {
|
||||||
private readonly logger = new Logger(ThaiPreprocessProcessor.name);
|
private readonly logger = new Logger(ThaiPreprocessProcessor.name);
|
||||||
private readonly thaiUrl: string;
|
private readonly thaiUrl: string;
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private readonly configService: ConfigService,
|
private readonly configService: ConfigService,
|
||||||
@InjectQueue('rag:embedding') private readonly embeddingQueue: Queue
|
@InjectQueue('rag-embedding') private readonly embeddingQueue: Queue
|
||||||
) {
|
) {
|
||||||
super();
|
super();
|
||||||
this.thaiUrl = this.configService.get<string>(
|
this.thaiUrl = this.configService.get<string>(
|
||||||
|
|||||||
@@ -28,9 +28,9 @@ const DLQ_DEFAULTS = {
|
|||||||
UserModule,
|
UserModule,
|
||||||
TypeOrmModule.forFeature([DocumentChunk]),
|
TypeOrmModule.forFeature([DocumentChunk]),
|
||||||
BullModule.registerQueue(
|
BullModule.registerQueue(
|
||||||
{ name: 'rag:ocr', defaultJobOptions: DLQ_DEFAULTS },
|
{ name: 'rag-ocr', defaultJobOptions: DLQ_DEFAULTS },
|
||||||
{ name: 'rag:thai-preprocess', defaultJobOptions: DLQ_DEFAULTS },
|
{ name: 'rag-thai-preprocess', defaultJobOptions: DLQ_DEFAULTS },
|
||||||
{ name: 'rag:embedding', defaultJobOptions: DLQ_DEFAULTS }
|
{ name: 'rag-embedding', defaultJobOptions: DLQ_DEFAULTS }
|
||||||
),
|
),
|
||||||
],
|
],
|
||||||
controllers: [RagController],
|
controllers: [RagController],
|
||||||
|
|||||||
@@ -22,13 +22,13 @@
|
|||||||
// EDITOR SETTINGS
|
// EDITOR SETTINGS
|
||||||
// ========================================
|
// ========================================
|
||||||
|
|
||||||
"editor.fontSize": 16,
|
"editor.fontSize": 19,
|
||||||
"editor.tabSize": 2,
|
"editor.tabSize": 2,
|
||||||
"editor.lineHeight": 1.6,
|
"editor.lineHeight": 1.6,
|
||||||
"editor.rulers": [80, 120],
|
"editor.rulers": [80, 120],
|
||||||
"editor.minimap.enabled": true,
|
"editor.minimap.enabled": true,
|
||||||
"editor.minimap.sectionHeaderFontSize": 12,
|
"editor.minimap.sectionHeaderFontSize": 12,
|
||||||
"editor.renderWhitespace": "selection",
|
"editor.renderWhitespace": "none",
|
||||||
// "editor.renderWhitespace": "boundary",
|
// "editor.renderWhitespace": "boundary",
|
||||||
"editor.renderControlCharacters": true,
|
"editor.renderControlCharacters": true,
|
||||||
"editor.bracketPairColorization.enabled": true,
|
"editor.bracketPairColorization.enabled": true,
|
||||||
@@ -677,6 +677,7 @@
|
|||||||
"scm.alwaysShowActions": false,
|
"scm.alwaysShowActions": false,
|
||||||
"workbench.settings.alwaysShowAdvancedSettings": true,
|
"workbench.settings.alwaysShowAdvancedSettings": true,
|
||||||
"npm.packageManager": "pnpm",
|
"npm.packageManager": "pnpm",
|
||||||
|
"editor.mouseWheelZoom": true,
|
||||||
},
|
},
|
||||||
// ========================================
|
// ========================================
|
||||||
// LAUNCH CONFIGURATIONS
|
// LAUNCH CONFIGURATIONS
|
||||||
|
|||||||
@@ -0,0 +1,34 @@
|
|||||||
|
# Specification Quality Checklist: Infrastructure Operations & Deployment Automation
|
||||||
|
|
||||||
|
**Purpose**: Validate specification completeness and quality before proceeding to planning
|
||||||
|
**Created**: 2026-04-20
|
||||||
|
**Feature**: [Infrastructure Operations & Deployment Automation](../spec.md)
|
||||||
|
|
||||||
|
## Content Quality
|
||||||
|
|
||||||
|
- [x] No implementation details (languages, frameworks, APIs)
|
||||||
|
- [x] Focused on user value and business needs
|
||||||
|
- [x] Written for non-technical stakeholders
|
||||||
|
- [x] All mandatory sections completed
|
||||||
|
|
||||||
|
## Requirement Completeness
|
||||||
|
|
||||||
|
- [x] No [NEEDS CLARIFICATION] markers remain
|
||||||
|
- [x] Requirements are testable and unambiguous
|
||||||
|
- [x] Success criteria are measurable
|
||||||
|
- [x] Success criteria are technology-agnostic (no implementation details)
|
||||||
|
- [x] All acceptance scenarios are defined
|
||||||
|
- [x] Edge cases are identified
|
||||||
|
- [x] Scope is clearly bounded
|
||||||
|
- [x] Dependencies and assumptions identified
|
||||||
|
|
||||||
|
## Feature Readiness
|
||||||
|
|
||||||
|
- [x] All functional requirements have clear acceptance criteria
|
||||||
|
- [x] User scenarios cover primary flows
|
||||||
|
- [x] Feature meets measurable outcomes defined in Success Criteria
|
||||||
|
- [x] No implementation details leak into specification
|
||||||
|
|
||||||
|
## Notes
|
||||||
|
|
||||||
|
- Items marked incomplete require spec updates before `/speckit-clarify` or `/speckit-plan`
|
||||||
@@ -0,0 +1,500 @@
|
|||||||
|
openapi: 3.0.3
|
||||||
|
info:
|
||||||
|
title: Infrastructure Operations API
|
||||||
|
description: API for managing infrastructure operations, deployments, and monitoring
|
||||||
|
version: 1.0.0
|
||||||
|
contact:
|
||||||
|
name: Infrastructure Team
|
||||||
|
email: infra@np-dms.work
|
||||||
|
|
||||||
|
paths:
|
||||||
|
/deployments:
|
||||||
|
get:
|
||||||
|
summary: List all deployments
|
||||||
|
description: Retrieve status of all deployment environments
|
||||||
|
tags:
|
||||||
|
- Deployments
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: List of deployments retrieved successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
deployments:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/Deployment'
|
||||||
|
|
||||||
|
post:
|
||||||
|
summary: Create new deployment
|
||||||
|
description: Initiate a new deployment to specified environment
|
||||||
|
tags:
|
||||||
|
- Deployments
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/DeploymentRequest'
|
||||||
|
responses:
|
||||||
|
'201':
|
||||||
|
description: Deployment initiated successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/Deployment'
|
||||||
|
'400':
|
||||||
|
description: Invalid deployment request
|
||||||
|
'409':
|
||||||
|
description: Deployment already in progress
|
||||||
|
|
||||||
|
/deployments/{deploymentId}:
|
||||||
|
get:
|
||||||
|
summary: Get deployment details
|
||||||
|
description: Retrieve detailed information about a specific deployment
|
||||||
|
tags:
|
||||||
|
- Deployments
|
||||||
|
parameters:
|
||||||
|
- name: deploymentId
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Deployment details retrieved successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/Deployment'
|
||||||
|
'404':
|
||||||
|
description: Deployment not found
|
||||||
|
|
||||||
|
patch:
|
||||||
|
summary: Update deployment status
|
||||||
|
description: Update deployment status or trigger rollback
|
||||||
|
tags:
|
||||||
|
- Deployments
|
||||||
|
parameters:
|
||||||
|
- name: deploymentId
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/DeploymentUpdate'
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Deployment updated successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/Deployment'
|
||||||
|
'404':
|
||||||
|
description: Deployment not found
|
||||||
|
'409':
|
||||||
|
description: Invalid state transition
|
||||||
|
|
||||||
|
/backups:
|
||||||
|
get:
|
||||||
|
summary: List backup archives
|
||||||
|
description: Retrieve list of available backup archives
|
||||||
|
tags:
|
||||||
|
- Backups
|
||||||
|
parameters:
|
||||||
|
- name: status
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [completed, in_progress, failed, validated]
|
||||||
|
- name: environment
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: List of backup archives retrieved successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
backups:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/BackupArchive'
|
||||||
|
|
||||||
|
post:
|
||||||
|
summary: Create backup
|
||||||
|
description: Initiate a new backup operation
|
||||||
|
tags:
|
||||||
|
- Backups
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/BackupRequest'
|
||||||
|
responses:
|
||||||
|
'201':
|
||||||
|
description: Backup initiated successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/BackupArchive'
|
||||||
|
'409':
|
||||||
|
description: Backup already in progress
|
||||||
|
|
||||||
|
/backups/{backupId}/restore:
|
||||||
|
post:
|
||||||
|
summary: Restore from backup
|
||||||
|
description: Initiate restore operation from specified backup
|
||||||
|
tags:
|
||||||
|
- Backups
|
||||||
|
parameters:
|
||||||
|
- name: backupId
|
||||||
|
in: path
|
||||||
|
required: true
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RestoreRequest'
|
||||||
|
responses:
|
||||||
|
'202':
|
||||||
|
description: Restore operation initiated
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/RestoreOperation'
|
||||||
|
'404':
|
||||||
|
description: Backup not found
|
||||||
|
'409':
|
||||||
|
description: Restore operation already in progress
|
||||||
|
|
||||||
|
/monitoring/metrics:
|
||||||
|
get:
|
||||||
|
summary: Get monitoring metrics
|
||||||
|
description: Retrieve current monitoring metrics for all services
|
||||||
|
tags:
|
||||||
|
- Monitoring
|
||||||
|
parameters:
|
||||||
|
- name: service
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: metric
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
- name: timeRange
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [1h, 6h, 24h, 7d, 30d]
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Metrics retrieved successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
metrics:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/MonitoringMetric'
|
||||||
|
|
||||||
|
/monitoring/alerts:
|
||||||
|
get:
|
||||||
|
summary: Get active alerts
|
||||||
|
description: Retrieve list of active monitoring alerts
|
||||||
|
tags:
|
||||||
|
- Monitoring
|
||||||
|
parameters:
|
||||||
|
- name: severity
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [critical, warning, info]
|
||||||
|
- name: status
|
||||||
|
in: query
|
||||||
|
schema:
|
||||||
|
type: string
|
||||||
|
enum: [active, acknowledged, resolved]
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Alerts retrieved successfully
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
alerts:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
$ref: '#/components/schemas/Alert'
|
||||||
|
|
||||||
|
post:
|
||||||
|
summary: Acknowledge alert
|
||||||
|
description: Acknowledge an active alert
|
||||||
|
tags:
|
||||||
|
- Monitoring
|
||||||
|
requestBody:
|
||||||
|
required: true
|
||||||
|
content:
|
||||||
|
application/json:
|
||||||
|
schema:
|
||||||
|
$ref: '#/components/schemas/AlertAcknowledgment'
|
||||||
|
responses:
|
||||||
|
'200':
|
||||||
|
description: Alert acknowledged successfully
|
||||||
|
'404':
|
||||||
|
description: Alert not found
|
||||||
|
|
||||||
|
components:
|
||||||
|
schemas:
|
||||||
|
Deployment:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
environment:
|
||||||
|
type: string
|
||||||
|
enum: [blue, green, staging, production]
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
enum: [planned, in_progress, testing, live, failed, decommissioned]
|
||||||
|
version:
|
||||||
|
type: string
|
||||||
|
services:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
createdAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
updatedAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
healthStatus:
|
||||||
|
type: string
|
||||||
|
enum: [healthy, unhealthy, unknown]
|
||||||
|
|
||||||
|
DeploymentRequest:
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- environment
|
||||||
|
- version
|
||||||
|
properties:
|
||||||
|
environment:
|
||||||
|
type: string
|
||||||
|
enum: [blue, green, staging, production]
|
||||||
|
version:
|
||||||
|
type: string
|
||||||
|
services:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
rollbackPlan:
|
||||||
|
type: boolean
|
||||||
|
healthCheckTimeout:
|
||||||
|
type: integer
|
||||||
|
format: int32
|
||||||
|
|
||||||
|
DeploymentUpdate:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
enum: [testing, live, failed, decommissioned]
|
||||||
|
rollback:
|
||||||
|
type: boolean
|
||||||
|
reason:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
BackupArchive:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
enum: [full, incremental, differential]
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
enum: [scheduled, in_progress, completed, failed, validated, expired]
|
||||||
|
environment:
|
||||||
|
type: string
|
||||||
|
size:
|
||||||
|
type: integer
|
||||||
|
format: int64
|
||||||
|
compressionRatio:
|
||||||
|
type: number
|
||||||
|
format: float
|
||||||
|
encrypted:
|
||||||
|
type: boolean
|
||||||
|
validated:
|
||||||
|
type: boolean
|
||||||
|
createdAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
expiresAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
retentionDays:
|
||||||
|
type: integer
|
||||||
|
format: int32
|
||||||
|
|
||||||
|
BackupRequest:
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- type
|
||||||
|
- environment
|
||||||
|
properties:
|
||||||
|
type:
|
||||||
|
type: string
|
||||||
|
enum: [full, incremental, differential]
|
||||||
|
environment:
|
||||||
|
type: string
|
||||||
|
include:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
enum: [databases, files, configurations, logs]
|
||||||
|
compression:
|
||||||
|
type: boolean
|
||||||
|
encryption:
|
||||||
|
type: boolean
|
||||||
|
validation:
|
||||||
|
type: boolean
|
||||||
|
|
||||||
|
RestoreRequest:
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- targetEnvironment
|
||||||
|
properties:
|
||||||
|
targetEnvironment:
|
||||||
|
type: string
|
||||||
|
include:
|
||||||
|
type: array
|
||||||
|
items:
|
||||||
|
type: string
|
||||||
|
enum: [databases, files, configurations, logs]
|
||||||
|
confirm:
|
||||||
|
type: boolean
|
||||||
|
reason:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
RestoreOperation:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
backupId:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
targetEnvironment:
|
||||||
|
type: string
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
enum: [pending, in_progress, completed, failed]
|
||||||
|
progress:
|
||||||
|
type: integer
|
||||||
|
format: int32
|
||||||
|
estimatedCompletion:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
startedAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
|
||||||
|
MonitoringMetric:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
service:
|
||||||
|
type: string
|
||||||
|
metric:
|
||||||
|
type: string
|
||||||
|
value:
|
||||||
|
type: number
|
||||||
|
format: float
|
||||||
|
unit:
|
||||||
|
type: string
|
||||||
|
timestamp:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
labels:
|
||||||
|
type: object
|
||||||
|
additionalProperties:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
Alert:
|
||||||
|
type: object
|
||||||
|
properties:
|
||||||
|
id:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
rule:
|
||||||
|
type: string
|
||||||
|
severity:
|
||||||
|
type: string
|
||||||
|
enum: [critical, warning, info]
|
||||||
|
status:
|
||||||
|
type: string
|
||||||
|
enum: [active, acknowledged, resolved]
|
||||||
|
service:
|
||||||
|
type: string
|
||||||
|
message:
|
||||||
|
type: string
|
||||||
|
triggeredAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
acknowledgedAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
acknowledgedBy:
|
||||||
|
type: string
|
||||||
|
resolvedAt:
|
||||||
|
type: string
|
||||||
|
format: date-time
|
||||||
|
|
||||||
|
AlertAcknowledgment:
|
||||||
|
type: object
|
||||||
|
required:
|
||||||
|
- alertId
|
||||||
|
properties:
|
||||||
|
alertId:
|
||||||
|
type: string
|
||||||
|
format: uuid
|
||||||
|
acknowledgedBy:
|
||||||
|
type: string
|
||||||
|
note:
|
||||||
|
type: string
|
||||||
|
|
||||||
|
securitySchemes:
|
||||||
|
BearerAuth:
|
||||||
|
type: http
|
||||||
|
scheme: bearer
|
||||||
|
bearerFormat: JWT
|
||||||
|
|
||||||
|
security:
|
||||||
|
- BearerAuth: []
|
||||||
@@ -0,0 +1,249 @@
|
|||||||
|
# Data Model: Infrastructure Operations & Deployment Automation
|
||||||
|
|
||||||
|
**Date**: 2026-04-20
|
||||||
|
**Feature**: Infrastructure Operations & Deployment Automation
|
||||||
|
**Status**: Complete
|
||||||
|
|
||||||
|
## Infrastructure Entities
|
||||||
|
|
||||||
|
### Docker Compose Configuration
|
||||||
|
|
||||||
|
**Description**: Infrastructure as code definitions for all services, environments, and deployments
|
||||||
|
**Key Attributes**:
|
||||||
|
- Configuration ID (unique identifier)
|
||||||
|
- Environment (development/staging/production)
|
||||||
|
- Service definitions and dependencies
|
||||||
|
- Network configurations
|
||||||
|
- Volume mappings
|
||||||
|
- Environment variables (secrets excluded)
|
||||||
|
- Health check definitions
|
||||||
|
- Resource limits
|
||||||
|
- Security policies (user, capabilities, read-only)
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All services must have health checks
|
||||||
|
- All containers must specify non-root user where possible
|
||||||
|
- All secrets must use external env files
|
||||||
|
- All images must use specific tags (no :latest)
|
||||||
|
- Resource limits must be defined for CPU and memory
|
||||||
|
|
||||||
|
### Backup Archive
|
||||||
|
|
||||||
|
**Description**: Complete system snapshots including databases, files, and configurations with metadata
|
||||||
|
**Key Attributes**:
|
||||||
|
- Archive ID (unique identifier)
|
||||||
|
- Timestamp (creation time)
|
||||||
|
- Backup type (full/incremental)
|
||||||
|
- Source environment
|
||||||
|
- Data sources (databases, files, configs)
|
||||||
|
- Compression status
|
||||||
|
- Encryption status
|
||||||
|
- Validation status
|
||||||
|
- Retention period
|
||||||
|
- Storage location
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All archives must be encrypted
|
||||||
|
- All archives must have integrity validation
|
||||||
|
- Backup frequency: daily for critical data
|
||||||
|
- Retention: 30 days daily, 90 days weekly, 1 year monthly
|
||||||
|
- Must include database consistency checks
|
||||||
|
|
||||||
|
### Monitoring Metric
|
||||||
|
|
||||||
|
**Description**: Performance and health data points collected from all infrastructure components
|
||||||
|
**Key Attributes**:
|
||||||
|
- Metric ID (unique identifier)
|
||||||
|
- Source service/container
|
||||||
|
- Metric name and type
|
||||||
|
- Value and timestamp
|
||||||
|
- Labels and dimensions
|
||||||
|
- Threshold definitions
|
||||||
|
- Alert status
|
||||||
|
- Aggregation rules
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All services must expose health metrics
|
||||||
|
- Critical metrics must have alert thresholds
|
||||||
|
- Data retention: 90 days detailed, 1 year aggregated
|
||||||
|
- Metrics must include CPU, memory, disk, network
|
||||||
|
- Application-specific metrics for business logic
|
||||||
|
|
||||||
|
### Security Policy
|
||||||
|
|
||||||
|
**Description**: Container hardening rules and compliance requirements for all deployments
|
||||||
|
**Key Attributes**:
|
||||||
|
- Policy ID (unique identifier)
|
||||||
|
- Policy type (user, capabilities, filesystem)
|
||||||
|
- Rule definitions
|
||||||
|
- Applicable services
|
||||||
|
- Compliance status
|
||||||
|
- Violation tracking
|
||||||
|
- Remediation procedures
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All containers must run with non-root users
|
||||||
|
- All containers must drop unnecessary capabilities
|
||||||
|
- All containers must use read-only filesystems where possible
|
||||||
|
- All containers must have security options defined
|
||||||
|
- Regular vulnerability scanning required
|
||||||
|
|
||||||
|
### Deployment Environment
|
||||||
|
|
||||||
|
**Description**: Isolated runtime spaces with consistent configurations
|
||||||
|
**Key Attributes**:
|
||||||
|
- Environment ID (unique identifier)
|
||||||
|
- Environment type (blue/green)
|
||||||
|
- Service instances
|
||||||
|
- Network configuration
|
||||||
|
- Storage configuration
|
||||||
|
- Access controls
|
||||||
|
- Deployment status
|
||||||
|
- Health status
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- Blue and green environments must be identical
|
||||||
|
- Network isolation between environments
|
||||||
|
- Consistent configuration across environments
|
||||||
|
- Automated health checks required
|
||||||
|
- Traffic switching must be atomic
|
||||||
|
|
||||||
|
### Alert Rule
|
||||||
|
|
||||||
|
**Description**: Threshold-based conditions that trigger notifications when system metrics exceed limits
|
||||||
|
**Key Attributes**:
|
||||||
|
- Rule ID (unique identifier)
|
||||||
|
- Metric source
|
||||||
|
- Threshold conditions
|
||||||
|
- Severity levels
|
||||||
|
- Notification channels
|
||||||
|
- Escalation rules
|
||||||
|
- Suppression rules
|
||||||
|
- Acknowledgment status
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All critical services must have alert rules
|
||||||
|
- Alert response time must be < 30 seconds
|
||||||
|
- Must include escalation paths
|
||||||
|
- Must define recovery procedures
|
||||||
|
- Regular alert testing required
|
||||||
|
|
||||||
|
### Secret Configuration
|
||||||
|
|
||||||
|
**Description**: Sensitive information managed outside version control
|
||||||
|
**Key Attributes**:
|
||||||
|
- Secret ID (unique identifier)
|
||||||
|
- Secret type (password, key, certificate)
|
||||||
|
- Usage context
|
||||||
|
- Access controls
|
||||||
|
- Rotation schedule
|
||||||
|
- Expiration date
|
||||||
|
- Compliance requirements
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- No secrets in version control
|
||||||
|
- All secrets must be encrypted at rest
|
||||||
|
- Access must be role-based
|
||||||
|
- Regular rotation required
|
||||||
|
- Audit trail for all access
|
||||||
|
|
||||||
|
### Service Instance
|
||||||
|
|
||||||
|
**Description**: Running container with specific configuration and health status
|
||||||
|
**Key Attributes**:
|
||||||
|
- Instance ID (unique identifier)
|
||||||
|
- Service name and version
|
||||||
|
- Container configuration
|
||||||
|
- Resource allocation
|
||||||
|
- Health status
|
||||||
|
- Start time
|
||||||
|
- Network endpoints
|
||||||
|
- Log configuration
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All instances must have health checks
|
||||||
|
- Resource limits must be enforced
|
||||||
|
- Restart policies must be defined
|
||||||
|
- Log aggregation must be configured
|
||||||
|
- Performance monitoring required
|
||||||
|
|
||||||
|
### Infrastructure Change
|
||||||
|
|
||||||
|
**Description**: Version-controlled modification to system configuration or deployment
|
||||||
|
**Key Attributes**:
|
||||||
|
- Change ID (unique identifier)
|
||||||
|
- Change type (configuration, deployment, security)
|
||||||
|
- Description and rationale
|
||||||
|
- Approval status
|
||||||
|
- Implementation status
|
||||||
|
- Rollback plan
|
||||||
|
- Impact assessment
|
||||||
|
- Compliance validation
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All changes must be version-controlled
|
||||||
|
- Changes require approval before production
|
||||||
|
- Rollback plans must be tested
|
||||||
|
- Impact assessment required
|
||||||
|
- Compliance validation mandatory
|
||||||
|
|
||||||
|
### Recovery Point
|
||||||
|
|
||||||
|
**Description**: Validated backup state that can be restored for disaster recovery
|
||||||
|
**Key Attributes**:
|
||||||
|
- Recovery point ID (unique identifier)
|
||||||
|
- Archive reference
|
||||||
|
- Validation status
|
||||||
|
- Recovery time objective
|
||||||
|
- Recovery procedures
|
||||||
|
- Test results
|
||||||
|
- Dependencies
|
||||||
|
|
||||||
|
**Validation Rules**:
|
||||||
|
- All recovery points must be tested
|
||||||
|
- RTO must be < 4 hours
|
||||||
|
- Recovery procedures must be documented
|
||||||
|
- Regular testing required
|
||||||
|
- Success rate must be > 95%
|
||||||
|
|
||||||
|
## State Transitions
|
||||||
|
|
||||||
|
### Deployment Lifecycle
|
||||||
|
```
|
||||||
|
Planned -> In Progress -> Testing -> Live -> Decommissioned
|
||||||
|
```
|
||||||
|
|
||||||
|
### Backup Lifecycle
|
||||||
|
```
|
||||||
|
Scheduled -> In Progress -> Completed -> Validated -> Expired
|
||||||
|
```
|
||||||
|
|
||||||
|
### Alert Lifecycle
|
||||||
|
```
|
||||||
|
Triggered -> Acknowledged -> Resolved -> Closed
|
||||||
|
```
|
||||||
|
|
||||||
|
### Change Management
|
||||||
|
```
|
||||||
|
Requested -> Approved -> Implemented -> Validated -> Closed
|
||||||
|
```
|
||||||
|
|
||||||
|
## Relationships
|
||||||
|
|
||||||
|
- **Environment** contains many **Service Instances**
|
||||||
|
- **Service Instance** generates **Monitoring Metrics**
|
||||||
|
- **Backup Archive** contains data from **Service Instances**
|
||||||
|
- **Alert Rule** monitors **Monitoring Metrics**
|
||||||
|
- **Security Policy** applies to **Service Instances**
|
||||||
|
- **Infrastructure Change** modifies **Deployment Environments**
|
||||||
|
- **Recovery Point** references **Backup Archive**
|
||||||
|
- **Secret Configuration** used by **Service Instances**
|
||||||
|
|
||||||
|
## Data Integrity Constraints
|
||||||
|
|
||||||
|
- All entities must have unique identifiers
|
||||||
|
- All timestamps must be UTC
|
||||||
|
- All audit fields must be immutable
|
||||||
|
- Foreign key relationships must be validated
|
||||||
|
- All sensitive data must be encrypted
|
||||||
|
- All changes must be auditable
|
||||||
@@ -0,0 +1,105 @@
|
|||||||
|
# Implementation Plan: [FEATURE]
|
||||||
|
|
||||||
|
**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
|
||||||
|
**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`
|
||||||
|
|
||||||
|
**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
[Extract from feature spec: primary requirement + technical approach from research]
|
||||||
|
|
||||||
|
## Technical Context
|
||||||
|
|
||||||
|
<!--
|
||||||
|
ACTION REQUIRED: Replace the content in this section with the technical details
|
||||||
|
for the project. The structure here is presented in advisory capacity to guide
|
||||||
|
the iteration process.
|
||||||
|
-->
|
||||||
|
|
||||||
|
**Language/Version**: [e.g., Python 3.11, Swift 5.9, Rust 1.75 or NEEDS CLARIFICATION]
|
||||||
|
**Primary Dependencies**: [e.g., FastAPI, UIKit, LLVM or NEEDS CLARIFICATION]
|
||||||
|
**Storage**: [if applicable, e.g., PostgreSQL, CoreData, files or N/A]
|
||||||
|
**Testing**: [e.g., pytest, XCTest, cargo test or NEEDS CLARIFICATION]
|
||||||
|
**Target Platform**: [e.g., Linux server, iOS 15+, WASM or NEEDS CLARIFICATION]
|
||||||
|
**Project Type**: [single/web/mobile - determines source structure]
|
||||||
|
**Performance Goals**: [domain-specific, e.g., 1000 req/s, 10k lines/sec, 60 fps or NEEDS CLARIFICATION]
|
||||||
|
**Constraints**: [domain-specific, e.g., <200ms p95, <100MB memory, offline-capable or NEEDS CLARIFICATION]
|
||||||
|
**Scale/Scope**: [domain-specific, e.g., 10k users, 1M LOC, 50 screens or NEEDS CLARIFICATION]
|
||||||
|
|
||||||
|
## Constitution Check
|
||||||
|
|
||||||
|
_GATE: Must pass before Phase 0 research. Re-check after Phase 1 design._
|
||||||
|
|
||||||
|
[Gates determined based on constitution file]
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
### Documentation (this feature)
|
||||||
|
|
||||||
|
```text
|
||||||
|
specs/[###-feature]/
|
||||||
|
├── plan.md # This file (/speckit.plan command output)
|
||||||
|
├── research.md # Phase 0 output (/speckit.plan command)
|
||||||
|
├── data-model.md # Phase 1 output (/speckit.plan command)
|
||||||
|
├── quickstart.md # Phase 1 output (/speckit.plan command)
|
||||||
|
├── contracts/ # Phase 1 output (/speckit.plan command)
|
||||||
|
└── tasks.md # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Source Code (repository root)
|
||||||
|
|
||||||
|
<!--
|
||||||
|
ACTION REQUIRED: Replace the placeholder tree below with the concrete layout
|
||||||
|
for this feature. Delete unused options and expand the chosen structure with
|
||||||
|
real paths (e.g., apps/admin, packages/something). The delivered plan must
|
||||||
|
not include Option labels.
|
||||||
|
-->
|
||||||
|
|
||||||
|
```text
|
||||||
|
# [REMOVE IF UNUSED] Option 1: Single project (DEFAULT)
|
||||||
|
src/
|
||||||
|
├── models/
|
||||||
|
├── services/
|
||||||
|
├── cli/
|
||||||
|
└── lib/
|
||||||
|
|
||||||
|
tests/
|
||||||
|
├── contract/
|
||||||
|
├── integration/
|
||||||
|
└── unit/
|
||||||
|
|
||||||
|
# [REMOVE IF UNUSED] Option 2: Web application (when "frontend" + "backend" detected)
|
||||||
|
backend/
|
||||||
|
├── src/
|
||||||
|
│ ├── models/
|
||||||
|
│ ├── services/
|
||||||
|
│ └── api/
|
||||||
|
└── tests/
|
||||||
|
|
||||||
|
frontend/
|
||||||
|
├── src/
|
||||||
|
│ ├── components/
|
||||||
|
│ ├── pages/
|
||||||
|
│ └── services/
|
||||||
|
└── tests/
|
||||||
|
|
||||||
|
# [REMOVE IF UNUSED] Option 3: Mobile + API (when "iOS/Android" detected)
|
||||||
|
api/
|
||||||
|
└── [same as backend above]
|
||||||
|
|
||||||
|
ios/ or android/
|
||||||
|
└── [platform-specific structure: feature modules, UI flows, platform tests]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Structure Decision**: [Document the selected structure and reference the real
|
||||||
|
directories captured above]
|
||||||
|
|
||||||
|
## Complexity Tracking
|
||||||
|
|
||||||
|
> **Fill ONLY if Constitution Check has violations that must be justified**
|
||||||
|
|
||||||
|
| Violation | Why Needed | Simpler Alternative Rejected Because |
|
||||||
|
| -------------------------- | ------------------ | ------------------------------------ |
|
||||||
|
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
|
||||||
|
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |
|
||||||
@@ -0,0 +1,293 @@
|
|||||||
|
# Quick Start Guide: Infrastructure Operations & Deployment Automation
|
||||||
|
|
||||||
|
**Purpose**: Get started with the Infrastructure Operations & Deployment Automation feature
|
||||||
|
**Date**: 2026-04-20
|
||||||
|
**Target Audience**: DevOps Engineers, System Administrators
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
### Hardware Requirements
|
||||||
|
- QNAP NAS (192.168.10.8) with Docker support
|
||||||
|
- ASUSTOR NAS (192.168.10.9) with Docker support
|
||||||
|
- SSH access between NAS devices configured
|
||||||
|
- Minimum 100GB storage for backups
|
||||||
|
|
||||||
|
### Software Requirements
|
||||||
|
- Docker 20.10+
|
||||||
|
- Docker Compose 2.0+
|
||||||
|
- Bash 5.0+ or PowerShell 7.2+
|
||||||
|
- Git client
|
||||||
|
- SSH key authentication
|
||||||
|
|
||||||
|
### Network Requirements
|
||||||
|
- Static IP addresses for both NAS devices
|
||||||
|
- Open ports: 22 (SSH), 80/443 (HTTP/HTTPS), 8080 (applications)
|
||||||
|
- VPN or secure network connection for remote access
|
||||||
|
|
||||||
|
## Initial Setup
|
||||||
|
|
||||||
|
### 1. Repository Configuration
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Clone the repository
|
||||||
|
git clone https://git.np-dms.work/np-dms/lcbp3.git
|
||||||
|
cd lcbp3
|
||||||
|
|
||||||
|
# Switch to the infrastructure branch
|
||||||
|
git checkout 002-infra-ops
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. SSH Key Authentication
|
||||||
|
|
||||||
|
Ensure SSH keys are configured between QNAP and ASUSTOR:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Test SSH connectivity
|
||||||
|
ssh admin@192.168.10.8 "docker --version"
|
||||||
|
ssh admin@192.168.10.9 "docker --version"
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Environment Configuration
|
||||||
|
|
||||||
|
Copy and configure environment files:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# QNAP environments
|
||||||
|
cp specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/app/.env.example \
|
||||||
|
specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/app/.env
|
||||||
|
|
||||||
|
# ASUSTOR environments
|
||||||
|
cp specs/04-Infrastructure-OPS/04-00-docker-compose/ASUSTOR/registry/.env.example \
|
||||||
|
specs/04-Infrastructure-OPS/04-00-docker-compose/ASUSTOR/registry/.env
|
||||||
|
```
|
||||||
|
|
||||||
|
Edit the `.env` files with your specific configurations:
|
||||||
|
- Database passwords
|
||||||
|
- SSL certificate paths
|
||||||
|
- Backup storage locations
|
||||||
|
- Monitoring endpoints
|
||||||
|
|
||||||
|
## Core Services Deployment
|
||||||
|
|
||||||
|
### 1. Database Services (QNAP)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Navigate to QNAP database directory
|
||||||
|
cd specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/mariadb
|
||||||
|
|
||||||
|
# Deploy MariaDB with phpMyAdmin
|
||||||
|
docker-compose -f docker-compose-lcbp3-db.yml up -d
|
||||||
|
|
||||||
|
# Verify deployment
|
||||||
|
docker-compose -f docker-compose-lcbp3-db.yml ps
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Application Services (QNAP)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Navigate to QNAP app directory
|
||||||
|
cd specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/app
|
||||||
|
|
||||||
|
# Deploy backend, frontend, and ClamAV
|
||||||
|
docker-compose -f docker-compose-app.yml up -d
|
||||||
|
|
||||||
|
# Verify deployment
|
||||||
|
docker-compose -f docker-compose-app.yml ps
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Reverse Proxy (QNAP)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Navigate to Nginx Proxy Manager directory
|
||||||
|
cd specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/npm
|
||||||
|
|
||||||
|
# Deploy reverse proxy
|
||||||
|
docker-compose -f docker-compose.yml up -d
|
||||||
|
|
||||||
|
# Access Nginx Proxy Manager
|
||||||
|
# URL: http://192.168.10.8:81
|
||||||
|
# Default: admin@example.com / changeme
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. Monitoring Stack (ASUSTOR)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Navigate to ASUSTOR monitoring directory
|
||||||
|
cd specs/04-Infrastructure-OPS/04-00-docker-compose/ASUSTOR/monitoring
|
||||||
|
|
||||||
|
# Deploy Prometheus, Grafana, and supporting services
|
||||||
|
docker-compose -f docker-compose.yml up -d
|
||||||
|
|
||||||
|
# Verify deployment
|
||||||
|
docker-compose -f docker-compose.yml ps
|
||||||
|
```
|
||||||
|
|
||||||
|
## SSL Certificate Setup
|
||||||
|
|
||||||
|
### 1. Initial Certificate Generation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# On QNAP, generate Let's Encrypt certificates
|
||||||
|
cd specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/npm
|
||||||
|
|
||||||
|
# Run certbot for initial certificate
|
||||||
|
docker-compose exec npm certbot --nginx -d your-domain.com
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Automated Renewal
|
||||||
|
|
||||||
|
Add to crontab for automatic renewal:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Edit crontab
|
||||||
|
crontab -e
|
||||||
|
|
||||||
|
# Add renewal task (runs daily at 2 AM)
|
||||||
|
0 2 * * * cd /path/to/npm && docker-compose exec -T npm certbot renew
|
||||||
|
```
|
||||||
|
|
||||||
|
## Backup Configuration
|
||||||
|
|
||||||
|
### 1. Initial Backup Setup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Navigate to backup scripts directory
|
||||||
|
cd specs/04-Infrastructure-OPS/04-02-backup-recovery
|
||||||
|
|
||||||
|
# Configure backup destinations
|
||||||
|
cp backup-config.example.yml backup-config.yml
|
||||||
|
|
||||||
|
# Edit backup-config.yml with your storage locations
|
||||||
|
nano backup-config.yml
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Automated Backup Schedule
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Add backup cron job (runs daily at 1 AM)
|
||||||
|
0 1 * * * /path/to/backup-scripts/daily-backup.sh
|
||||||
|
|
||||||
|
# Add backup validation (runs weekly on Sunday at 3 AM)
|
||||||
|
0 3 * * 0 /path/to/backup-scripts/validate-backups.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
## Monitoring Configuration
|
||||||
|
|
||||||
|
### 1. Grafana Dashboard Access
|
||||||
|
|
||||||
|
1. Access Grafana: `http://192.168.10.9:3000`
|
||||||
|
2. Default credentials: `admin / admin` (change on first login)
|
||||||
|
3. Import dashboards from `specs/04-Infrastructure-OPS/04-03-monitoring/dashboards/`
|
||||||
|
|
||||||
|
### 2. Alert Configuration
|
||||||
|
|
||||||
|
1. Access AlertManager: `http://192.168.10.9:9093`
|
||||||
|
2. Configure notification channels (email, Slack, etc.)
|
||||||
|
3. Test alert rules to ensure notifications work
|
||||||
|
|
||||||
|
## Blue-Green Deployment
|
||||||
|
|
||||||
|
### 1. Environment Setup
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create blue environment (current production)
|
||||||
|
cd specs/04-Infrastructure-OPS/04-00-docker-compose/QNAP/app
|
||||||
|
docker-compose -f docker-compose-app.yml -p app-blue up -d
|
||||||
|
|
||||||
|
# Create green environment (new version; use an override file for distinct host ports and the new image tag to avoid conflicts with blue)
|
||||||
|
docker-compose -f docker-compose-app.yml -p app-green up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Traffic Switching
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Switch traffic to green environment
|
||||||
|
# Update Nginx Proxy Manager upstream configuration
|
||||||
|
# Point to green environment containers
|
||||||
|
# Test green environment functionality
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Rollback Procedure
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# If issues detected, rollback to blue
|
||||||
|
# Update Nginx Proxy Manager upstream configuration
|
||||||
|
# Point back to blue environment containers
|
||||||
|
# Stop green environment containers
|
||||||
|
```
|
||||||
|
|
||||||
|
## Security Hardening
|
||||||
|
|
||||||
|
### 1. Container Security Scan
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install Trivy
|
||||||
|
curl -sfL https://raw.githubusercontent.com/aquasecurity/trivy/main/contrib/install.sh | sh -s -- -b /usr/local/bin
|
||||||
|
|
||||||
|
# Scan all running containers
|
||||||
|
trivy image --severity HIGH,CRITICAL $(docker ps --format "{{.Image}}" | sort -u)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2. Security Policy Validation
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run security validation script
|
||||||
|
cd specs/04-Infrastructure-OPS/04-06-security-operations
|
||||||
|
./validate-security-policies.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
1. **Container won't start**
|
||||||
|
```bash
|
||||||
|
# Check logs
|
||||||
|
docker-compose logs [service-name]
|
||||||
|
|
||||||
|
# Check resource usage
|
||||||
|
docker stats
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Backup failures**
|
||||||
|
```bash
|
||||||
|
# Check backup logs
|
||||||
|
tail -f /var/log/backup.log
|
||||||
|
|
||||||
|
# Test connectivity to backup storage
|
||||||
|
ping backup-storage-host
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Monitoring alerts not working**
|
||||||
|
```bash
|
||||||
|
# Check Prometheus targets
|
||||||
|
curl http://192.168.10.9:9090/api/v1/targets
|
||||||
|
|
||||||
|
# Test AlertManager
|
||||||
|
curl http://192.168.10.9:9093/api/v1/alerts
|
||||||
|
```
|
||||||
|
|
||||||
|
### Health Checks
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Check all services health
|
||||||
|
curl -f http://192.168.10.8:3000/health || echo "Backend unhealthy"
|
||||||
|
curl -f http://192.168.10.8/health || echo "Frontend unhealthy"
|
||||||
|
curl -f http://192.168.10.9:9090/-/healthy || echo "Prometheus unhealthy"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
1. **Configure automated monitoring alerts** for your specific thresholds
|
||||||
|
2. **Set up backup retention policies** based on your compliance requirements
|
||||||
|
3. **Implement disaster recovery testing** on a regular schedule
|
||||||
|
4. **Configure log aggregation** for centralized monitoring
|
||||||
|
5. **Set up automated security scanning** in your CI/CD pipeline
|
||||||
|
|
||||||
|
## Support
|
||||||
|
|
||||||
|
For issues and questions:
|
||||||
|
- Check the troubleshooting section above
|
||||||
|
- Review logs in `/var/log/` directories
|
||||||
|
- Consult the full documentation in `specs/04-Infrastructure-OPS/`
|
||||||
|
- Contact the infrastructure team for escalated issues
|
||||||
@@ -0,0 +1,82 @@
|
|||||||
|
# Phase 0 Research: Infrastructure Operations & Deployment Automation
|
||||||
|
|
||||||
|
**Date**: 2026-04-20
|
||||||
|
**Feature**: Infrastructure Operations & Deployment Automation
|
||||||
|
**Status**: Complete
|
||||||
|
|
||||||
|
## Research Findings
|
||||||
|
|
||||||
|
### Blue-Green Deployment Strategy
|
||||||
|
|
||||||
|
**Decision**: Docker Compose with Nginx Proxy Manager for traffic switching
|
||||||
|
**Rationale**: Provides zero-downtime deployments by maintaining two identical production environments (blue/green) and switching traffic via reverse proxy configuration updates
|
||||||
|
**Alternatives Considered**: Kubernetes (too complex for current scale), Docker Swarm (limited networking features), Manual deployment scripts (prone to human error)
|
||||||
|
|
||||||
|
### Backup & Recovery Solution
|
||||||
|
|
||||||
|
**Decision**: Restic for encrypted backups + MariaDB dump scripts + automated validation
|
||||||
|
**Rationale**: Restic provides deduplication, encryption, and cloud storage support. Combined with native database dumps ensures complete system state capture
|
||||||
|
**Alternatives Considered**: Borg Backup (steeper learning curve), rsync only (no encryption/deduplication), commercial solutions (cost constraints)
|
||||||
|
|
||||||
|
### Monitoring Stack
|
||||||
|
|
||||||
|
**Decision**: Prometheus + Grafana + AlertManager + Node Exporter + cAdvisor
|
||||||
|
**Rationale**: Industry-standard monitoring stack with extensive community support, flexible alerting rules, and container-native metrics collection
|
||||||
|
**Alternatives Considered**: Zabbix (more complex setup), Nagios (older architecture), Datadog (commercial cost)
|
||||||
|
|
||||||
|
### Container Security Hardening
|
||||||
|
|
||||||
|
**Decision**: Docker security hardening with non-root users, read-only filesystems, capability dropping, and Trivy scanning
|
||||||
|
**Rationale**: Provides defense-in-depth security while maintaining functionality. Trivy offers comprehensive vulnerability scanning
|
||||||
|
**Alternatives Considered**: Podman (better security but ecosystem compatibility issues), Kubernetes security policies (overkill for current scale)
|
||||||
|
|
||||||
|
### Multi-NAS Architecture
|
||||||
|
|
||||||
|
**Decision**: QNAP for primary services, ASUSTOR for backup/monitoring registry
|
||||||
|
**Rationale**: Leverages existing hardware investment, provides geographic separation for critical services, and maintains established SSH key authentication
|
||||||
|
**Alternatives Considered**: Cloud hosting (recurring costs, data sovereignty concerns), Single NAS (single point of failure)
|
||||||
|
|
||||||
|
### SSL Certificate Management
|
||||||
|
|
||||||
|
**Decision**: Certbot with Let's Encrypt + automated renewal via cron jobs
|
||||||
|
**Rationale**: Free, automated certificate management with established reliability. Integration with Nginx Proxy Manager simplifies deployment
|
||||||
|
**Alternatives Considered**: Commercial CAs (cost), Self-signed certificates (browser warnings), Cloudflare certificates (dependency on external service)
|
||||||
|
|
||||||
|
### Secrets Management
|
||||||
|
|
||||||
|
**Decision**: Environment files with .gitignore + SSH key authentication
|
||||||
|
**Rationale**: Simple, secure approach that works across both NAS environments. No additional infrastructure required
|
||||||
|
**Alternatives Considered**: HashiCorp Vault (complex setup), Docker Swarm secrets (limited to single host), Infisical/SOPS (additional learning curve)
|
||||||
|
|
||||||
|
## Technical Decisions Summary
|
||||||
|
|
||||||
|
1. **Docker Compose** as primary orchestration tool
|
||||||
|
2. **Blue-Green deployment** pattern for zero downtime
|
||||||
|
3. **Restic** for backup encryption and deduplication
|
||||||
|
4. **Prometheus/Grafana** stack for monitoring
|
||||||
|
5. **Nginx Proxy Manager** for reverse proxy and SSL termination
|
||||||
|
6. **Trivy** for container vulnerability scanning
|
||||||
|
7. **Environment files** for secrets management
|
||||||
|
8. **SSH key authentication** for cross-NAS communication
|
||||||
|
|
||||||
|
## Implementation Constraints
|
||||||
|
|
||||||
|
- Must maintain existing QNAP/ASUSTOR IP addresses (192.168.10.8/9)
|
||||||
|
- Must preserve current data storage locations
|
||||||
|
- Must integrate with existing Gitea Actions CI/CD pipeline
|
||||||
|
- Must comply with ADR-016 security requirements
|
||||||
|
- Must support Thai language documentation per project standards
|
||||||
|
|
||||||
|
## Success Metrics Alignment
|
||||||
|
|
||||||
|
All technical decisions support the success criteria defined in the specification:
|
||||||
|
|
||||||
|
- 99.9% uptime through redundant infrastructure
|
||||||
|
- 30-second alert generation via Prometheus monitoring
|
||||||
|
- 4-hour RTO through automated backup validation
|
||||||
|
- Zero-downtime deployments via blue-green strategy
|
||||||
|
- 100% security compliance via container hardening
|
||||||
|
|
||||||
|
## Next Steps
|
||||||
|
|
||||||
|
Proceed to Phase 1: Design & Contracts with these technical foundations established.
|
||||||
@@ -0,0 +1,187 @@
|
|||||||
|
# Feature Specification: Infrastructure Operations & Deployment Automation
|
||||||
|
|
||||||
|
**Feature Branch**: `002-infra-ops`
|
||||||
|
**Created**: 2026-04-20
|
||||||
|
**Status**: Draft
|
||||||
|
**Input**: User description: "Infrastructure operations and deployment automation including Docker Compose configurations, container orchestration, monitoring, backup/recovery, and maintenance procedures for the NAP-DMS system"
|
||||||
|
|
||||||
|
## Clarifications
|
||||||
|
|
||||||
|
### Session 2026-04-20
|
||||||
|
|
||||||
|
- Q: Which services are included in Infrastructure Operations scope beyond NAP-DMS applications?
|
||||||
|
- A: All services in Docker Compose stacks including Gitea, n8n, RocketChat, and supporting services
|
||||||
|
|
||||||
|
- Q: What is the expected data volume and annual growth rate for all services?
|
||||||
|
- A: 500GB current data with 20% annual growth
|
||||||
|
|
||||||
|
- Q: What external services or third-party integrations are required beyond internal services?
|
||||||
|
- A: Email SMTP for notifications and Let's Encrypt for SSL certificates
|
||||||
|
|
||||||
|
- Q: What are the concurrent user count and performance targets for response time?
|
||||||
|
- A: 100 concurrent users with 2-second average response time
|
||||||
|
|
||||||
|
- Q: What technical constraints exist (budget, hardware, compliance requirements)?
|
||||||
|
- A: Must work with existing QNAP/ASUSTOR hardware infrastructure
|
||||||
|
|
||||||
|
## User Scenarios & Testing _(mandatory)_
|
||||||
|
|
||||||
|
<!--
|
||||||
|
IMPORTANT: User stories should be PRIORITIZED as user journeys ordered by importance.
|
||||||
|
Each user story/journey must be INDEPENDENTLY TESTABLE - meaning if you implement just ONE of them,
|
||||||
|
you should still have a viable MVP (Minimum Viable Product) that delivers value.
|
||||||
|
|
||||||
|
Assign priorities (P1, P2, P3, etc.) to each story, where P1 is the most critical.
|
||||||
|
Think of each story as a standalone slice of functionality that can be:
|
||||||
|
- Developed independently
|
||||||
|
- Tested independently
|
||||||
|
- Deployed independently
|
||||||
|
- Demonstrated to users independently
|
||||||
|
-->
|
||||||
|
|
||||||
|
### User Story 1 - Zero-Downtime Deployment (Priority: P1)
|
||||||
|
|
||||||
|
As a DevOps engineer, I need to deploy updates for all services (NAP-DMS applications, databases, monitoring, Gitea, n8n, RocketChat, and supporting services) without interrupting user access to any system components.
|
||||||
|
|
||||||
|
**Why this priority**: Critical for business continuity - system cannot afford downtime during regular maintenance windows.
|
||||||
|
|
||||||
|
**Independent Test**: Can be fully tested by deploying a test application version using blue-green containers and verifying traffic switches seamlessly without user session interruption.
|
||||||
|
|
||||||
|
**Acceptance Scenarios**:
|
||||||
|
|
||||||
|
1. **Given** a running production environment, **When** I deploy a new version, **Then** users continue accessing the system without interruption
|
||||||
|
2. **Given** a deployment failure, **When** the rollback is triggered, **Then** the system immediately switches back to the previous stable version
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### User Story 2 - Automated Backup & Recovery (Priority: P1)
|
||||||
|
|
||||||
|
As a system administrator, I need automated daily backups of all services data (NAP-DMS applications, databases, monitoring, Gitea, n8n, RocketChat, configurations, and supporting services) and the ability to restore the entire system within 4 hours of a catastrophic failure.
|
||||||
|
|
||||||
|
**Why this priority**: Essential for data protection and business continuity compliance with document management regulations.
|
||||||
|
|
||||||
|
**Independent Test**: Can be fully tested by running backup procedures and performing a full system restore in a test environment to verify all data is recoverable.
|
||||||
|
|
||||||
|
**Acceptance Scenarios**:
|
||||||
|
|
||||||
|
1. **Given** the backup schedule is configured, **When** the daily backup runs, **Then** all databases, files, and configurations are successfully backed up
|
||||||
|
2. **Given** a system failure occurs, **When** I initiate recovery, **Then** the entire system is restored to its last known good state within 4 hours
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### User Story 3 - Real-time Monitoring & Alerting (Priority: P1)
|
||||||
|
|
||||||
|
As an on-call engineer, I need to receive immediate alerts when any system components (NAP-DMS applications, databases, monitoring, Gitea, n8n, RocketChat, and supporting services) fail or performance degrades below acceptable thresholds.
|
||||||
|
|
||||||
|
**Why this priority**: Prevents minor issues from becoming major outages and ensures rapid response to system problems.
|
||||||
|
|
||||||
|
**Independent Test**: Can be fully tested by simulating various failure scenarios and verifying appropriate alerts are generated and delivered to the correct channels.
|
||||||
|
|
||||||
|
**Acceptance Scenarios**:
|
||||||
|
|
||||||
|
1. **Given** monitoring is active, **When** a service becomes unresponsive, **Then** an alert is sent within 30 seconds
|
||||||
|
2. **Given** system resources exceed 80% utilization, **When** the threshold is crossed, **Then** a performance alert is generated with actionable diagnostics
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### User Story 4 - Container Security Hardening (Priority: P2)
|
||||||
|
|
||||||
|
As a security administrator, I need all containers (NAP-DMS applications, databases, monitoring, Gitea, n8n, RocketChat, and supporting services) to run with minimal privileges and no exposed secrets to maintain compliance with security policies.
|
||||||
|
|
||||||
|
**Why this priority**: Prevents privilege escalation attacks and protects sensitive configuration data.
|
||||||
|
|
||||||
|
**Independent Test**: Can be fully tested by running security scans on all containers and verifying they meet hardening requirements.
|
||||||
|
|
||||||
|
**Acceptance Scenarios**:
|
||||||
|
|
||||||
|
1. **Given** containers are deployed, **When** I run a security audit, **Then** all containers pass privilege escalation and secret exposure checks
|
||||||
|
2. **Given** new containers are added, **When** they are deployed, **Then** they automatically inherit security hardening policies
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
### User Story 5 - Infrastructure as Code Management (Priority: P2)
|
||||||
|
|
||||||
|
As a DevOps engineer, I need to manage all infrastructure configurations (NAP-DMS applications, databases, monitoring, Gitea, n8n, RocketChat, and supporting services) through version-controlled code files rather than manual server changes.
|
||||||
|
|
||||||
|
**Why this priority**: Ensures consistency across environments and enables reproducible infrastructure deployments.
|
||||||
|
|
||||||
|
**Independent Test**: Can be fully tested by deploying a complete environment from code and verifying it matches the production configuration.
|
||||||
|
|
||||||
|
**Acceptance Scenarios**:
|
||||||
|
|
||||||
|
1. **Given** infrastructure code changes, **When** I apply the changes, **Then** the environment configuration matches exactly what's defined in the code
|
||||||
|
2. **Given** a new environment is needed, **When** I deploy from code, **Then** the environment is created with all required services and configurations
|
||||||
|
|
||||||
|
### Edge Cases
|
||||||
|
|
||||||
|
- What happens when network connectivity between QNAP and ASUSTOR fails during backup operations?
|
||||||
|
- How does system handle container registry authentication failures during deployment?
|
||||||
|
- What happens when Docker Compose files contain syntax errors during environment startup?
|
||||||
|
- How does system handle SSL certificate expiration for reverse proxy services?
|
||||||
|
- What happens when monitoring services become unavailable while system is running?
|
||||||
|
- How does system handle storage space exhaustion on production servers?
|
||||||
|
- What happens when multiple deployment processes are initiated simultaneously?
|
||||||
|
- How does system handle database connection pool exhaustion during high load?
|
||||||
|
- What happens when automated security updates conflict with custom container configurations?
|
||||||
|
- How does system handle partial backup failures where some services complete but others fail?
|
||||||
|
- How does system handle Email SMTP service failures for alert notifications?
|
||||||
|
- What happens when Let's Encrypt certificate renewal fails due to network issues?
|
||||||
|
|
||||||
|
## Requirements _(mandatory)_
|
||||||
|
|
||||||
|
<!--
|
||||||
|
ACTION REQUIRED: The content in this section represents placeholders.
|
||||||
|
Fill them out with the right functional requirements.
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Functional Requirements
|
||||||
|
|
||||||
|
- **FR-001**: System MUST support blue-green deployment strategy for zero-downtime updates of all services (NAP-DMS applications, databases, monitoring, Gitea, n8n, RocketChat, and supporting services)
|
||||||
|
- **FR-002**: System MUST automate daily backups of all services data including databases, application files, configurations, and supporting service data
|
||||||
|
- **FR-003**: System MUST provide complete disaster recovery capabilities with 4-hour RTO (Recovery Time Objective)
|
||||||
|
- **FR-004**: System MUST monitor all infrastructure components (all services) and generate alerts for failures or performance degradation
|
||||||
|
- **FR-005**: System MUST enforce container security hardening including non-root users, privilege dropping, and read-only filesystems for all services
|
||||||
|
- **FR-006**: System MUST manage all infrastructure configurations through version-controlled Docker Compose files for all services
|
||||||
|
- **FR-007**: System MUST support automated SSL certificate management and renewal for all web services
|
||||||
|
- **FR-008**: System MUST provide centralized logging aggregation for all containers and services
|
||||||
|
- **FR-009**: System MUST implement resource limits and health checks for all containers
|
||||||
|
- **FR-010**: System MUST support multi-environment deployments (development, staging, production) with consistent configurations
|
||||||
|
- **FR-011**: System MUST provide automated vulnerability scanning for all container images
|
||||||
|
- **FR-012**: System MUST support infrastructure secrets management without exposing them in version control
|
||||||
|
- **FR-013**: System MUST implement backup validation procedures to ensure data integrity
|
||||||
|
- **FR-014**: System MUST provide rollback capabilities for failed deployments
|
||||||
|
- **FR-015**: System MUST generate audit trails for all infrastructure changes and deployments
|
||||||
|
|
||||||
|
### Key Entities _(include if feature involves data)_
|
||||||
|
|
||||||
|
- **Docker Compose Configuration**: Infrastructure as code definitions for all services, environments, and deployments
|
||||||
|
- **Backup Archive**: Complete system snapshots including databases, files, and configurations with metadata (500GB current data, 20% annual growth)
|
||||||
|
- **Monitoring Metric**: Performance and health data points collected from all infrastructure components
|
||||||
|
- **Security Policy**: Container hardening rules and compliance requirements for all deployments
|
||||||
|
- **Deployment Environment**: Isolated runtime spaces (development, staging, production) with consistent configurations (constrained by existing QNAP/ASUSTOR hardware)
|
||||||
|
- **Alert Rule**: Threshold-based conditions that trigger notifications when system metrics exceed limits
|
||||||
|
- **Secret Configuration**: Sensitive information (passwords, keys, certificates) managed outside version control
|
||||||
|
- **Service Instance**: Running container with specific configuration, resource limits, and health status
|
||||||
|
- **Infrastructure Change**: Version-controlled modification to system configuration or deployment
|
||||||
|
- **Recovery Point**: Validated backup state that can be restored for disaster recovery
|
||||||
|
|
||||||
|
## Success Criteria _(mandatory)_
|
||||||
|
|
||||||
|
<!--
|
||||||
|
ACTION REQUIRED: Define measurable success criteria.
|
||||||
|
These must be technology-agnostic and measurable.
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Measurable Outcomes
|
||||||
|
|
||||||
|
- **SC-001**: Deployments complete with zero user-visible downtime in 99.9% of attempts
|
||||||
|
- **SC-002**: System recovery from backup completes within 4 hours with 100% data integrity
|
||||||
|
- **SC-003**: Critical system alerts are generated and delivered within 30 seconds of failure detection
|
||||||
|
- **SC-004**: All containers pass security hardening compliance checks with 100% success rate
|
||||||
|
- **SC-005**: Infrastructure changes are applied from version-controlled code with 100% consistency across environments
|
||||||
|
- **SC-006**: SSL certificates are renewed automatically with 0 expiration incidents per year
|
||||||
|
- **SC-007**: Backup validation procedures achieve 99.9% success rate with automated integrity verification
|
||||||
|
- **SC-008**: Failed deployments are automatically rolled back within 60 seconds with 100% success rate
|
||||||
|
- **SC-009**: System uptime exceeds 99.9% monthly availability target
|
||||||
|
- **SC-010**: Infrastructure audit trail captures 100% of configuration changes and deployments
|
||||||
|
- **SC-011**: System supports 100 concurrent users with 2-second average response time under normal load
|
||||||
@@ -3,10 +3,10 @@
|
|||||||
---
|
---
|
||||||
|
|
||||||
title: 'Network Design & Security'
|
title: 'Network Design & Security'
|
||||||
version: 1.8.0
|
version: 1.8.2
|
||||||
status: first-draft
|
status: first-draft
|
||||||
owner: Nattanin Peancharoen
|
owner: Nattanin Peancharoen
|
||||||
last_updated: 2026-02-23
|
last_updated: 2026-04-23
|
||||||
related:
|
related:
|
||||||
|
|
||||||
- specs/02-Architecture/00-01-system-context.md
|
- specs/02-Architecture/00-01-system-context.md
|
||||||
@@ -16,11 +16,11 @@ related:
|
|||||||
|
|
||||||
## 1. 🌐 Network Segmentation (VLANs) และหลักการ Zero Trust
|
## 1. 🌐 Network Segmentation (VLANs) และหลักการ Zero Trust
|
||||||
|
|
||||||
ระบบ LCBP3-DMS จัดแบ่งเครือข่ายออกเป็นเครือข่ายย่อย (VLANs) เพื่อการควบคุมการเข้าถึง (Access Control) ตามหลักการ Zero Trust โดยใช้อุปกรณ์ Network ของ Omada (ER7206 Router & SG2428P Core Switch) และ Switch ต่างๆ ในเครือข่าย
|
ระบบ LCBP3-DMS จัดแบ่งเครือข่ายออกเป็นเครือข่ายย่อย (VLANs) เพื่อการควบคุมการเข้าถึง (Access Control) ตามหลักการ Zero Trust โดยใช้อุปกรณ์ Network ของ Omada (ER7206 Router & SG3210X-M2 Core Switch) และ Switch ต่างๆ ในเครือข่าย
|
||||||
|
|
||||||
| VLAN ID | Name | Purpose | Subnet | Gateway | Notes |
|
| VLAN ID | Name | Purpose | Subnet | Gateway | Notes |
|
||||||
| ------- | -------------- | ----------------------- | --------------- | ------------ | ---------------------------------------------------- |
|
| ------- | -------------- | ----------------------- | --------------- | ------------ | ---------------------------------------------------- |
|
||||||
| 10 | SERVER | Server & Storage | 192.168.10.0/24 | 192.168.10.1 | Servers (QNAP, ASUSTOR). Static IPs ONLY. |
|
| 10 | SERVER | Server & Storage | 192.168.10.0/24 | 192.168.10.1 | Servers (QNAP, ASUSTOR, Zyxel NAS326). Static IPs ONLY. |
|
||||||
| 20 | MGMT (Default) | Management & Admin | 192.168.20.0/24 | 192.168.20.1 | Network devices (ER7206, OC200, Switches), Admin PC. |
|
| 20 | MGMT (Default) | Management & Admin | 192.168.20.0/24 | 192.168.20.1 | Network devices (ER7206, OC200, Switches), Admin PC. |
|
||||||
| 30 | USER | User Devices | 192.168.30.0/24 | 192.168.30.1 | Staff PC, Notebooks, Wi-Fi. |
|
| 30 | USER | User Devices | 192.168.30.0/24 | 192.168.30.1 | Staff PC, Notebooks, Wi-Fi. |
|
||||||
| 40 | CCTV | Surveillance | 192.168.40.0/24 | 192.168.40.1 | Cameras, NVR. Isolated. |
|
| 40 | CCTV | Surveillance | 192.168.40.0/24 | 192.168.40.1 | Cameras, NVR. Isolated. |
|
||||||
@@ -81,36 +81,42 @@ flowchart TB
|
|||||||
```mermaid
|
```mermaid
|
||||||
graph TB
|
graph TB
|
||||||
subgraph Internet
|
subgraph Internet
|
||||||
WAN[("🌐 Internet<br/>WAN")]
|
WAN[("Internet<br/>WAN")]
|
||||||
end
|
end
|
||||||
|
|
||||||
subgraph Router["ER7206 Router"]
|
subgraph Router["ER7206 Router"]
|
||||||
R[("🔲 ER7206<br/>192.168.20.1")]
|
R[("ER7206<br/>192.168.20.1")]
|
||||||
end
|
end
|
||||||
|
|
||||||
subgraph CoreSwitch["SG2428P Core Switch"]
|
subgraph CoreSwitch["SG3210X-M2 Core Switch"]
|
||||||
CS[("🔲 SG2428P<br/>192.168.20.2")]
|
CS[("SG3210X-M2<br/>192.168.20.4")]
|
||||||
end
|
end
|
||||||
|
|
||||||
subgraph ServerSwitch["AMPCOM 2.5G Switch"]
|
subgraph DistSwitch["SG2428P Distribution Switch"]
|
||||||
SS[("🔲 AMPCOM<br/>192.168.20.3")]
|
DS[("SG2428P<br/>192.168.20.2")]
|
||||||
end
|
end
|
||||||
|
|
||||||
subgraph Servers["VLAN 10 - Servers"]
|
subgraph Servers["VLAN 10 - Servers"]
|
||||||
QNAP[("💾 QNAP<br/>192.168.10.8")]
|
QNAP[("QNAP<br/>192.168.10.8")]
|
||||||
ASUSTOR[("💾 ASUSTOR<br/>192.168.10.9")]
|
ASUSTOR[("ASUSTOR<br/>192.168.10.9")]
|
||||||
|
Zyxel[("Zyxel NAS326<br/>192.168.10.111")]
|
||||||
end
|
end
|
||||||
|
|
||||||
subgraph AccessPoints["EAP610 x16"]
|
subgraph AccessPoints["EAP610 x16"]
|
||||||
AP[("📶 WiFi APs")]
|
AP[("WiFi APs")]
|
||||||
end
|
end
|
||||||
|
|
||||||
WAN --> R
|
subgraph AdminPC["Admin Desktop"]
|
||||||
R -->|Port 3| CS
|
PC[("Admin PC<br/>192.168.20.100")]
|
||||||
CS -->|LAG Port 3-4| SS
|
end
|
||||||
SS -->|Port 3-4 LACP| QNAP
|
|
||||||
SS -->|Port 5-6 LACP| ASUSTOR
|
WAN -->|Port 2| R
|
||||||
CS -->|Port 5-20| AP
|
R -->|SFP Port 1| CS
|
||||||
|
CS -->|SFP+ Port 9| DS
|
||||||
|
CS -->|Port 3-4 LACP| QNAP
|
||||||
|
CS -->|Port 5-6 LACP| ASUSTOR
|
||||||
|
CS -->|Port 8| PC
|
||||||
|
DS -->|Port 1-16| AP
|
||||||
```
|
```
|
||||||
|
|
||||||
### 3.1 Switch Profiles & Interfaces
|
### 3.1 Switch Profiles & Interfaces
|
||||||
@@ -123,13 +129,258 @@ graph TB
|
|||||||
- **06_AP_TRUNK:** EAP610 Access Points (Native: 20, Tagged: 30, 70)
|
- **06_AP_TRUNK:** EAP610 Access Points (Native: 20, Tagged: 30, 70)
|
||||||
- **07_VOICE_ACCESS:** IP Phones (Native: 30, Tagged: 50, Untagged: 30)
|
- **07_VOICE_ACCESS:** IP Phones (Native: 30, Tagged: 50, Untagged: 30)
|
||||||
|
|
||||||
### 3.2 NAS NIC Bonding Configuration
|
### 3.2 Detailed Port Configuration
|
||||||
|
|
||||||
|
#### 3.2.1 TP-Link ER7206 (Router)
|
||||||
|
- **1× Gigabit SFP WAN/LAN port + 5× Gigabit RJ45 ports (1× WAN, 4× WAN/LAN)**
|
||||||
|
- SFP Port 1 WAN/LAN -> SG3210X-M2 Port 10 SFP+
|
||||||
|
- Port 2 (WAN) uplink Internet
|
||||||
|
|
||||||
|
#### 3.2.2 TP-Link SG3210X-M2 (Core Switch)
|
||||||
|
- **8-Port 2.5Gbps + 2-Port 10G SFP+ Slots**
|
||||||
|
- Port 1&2 (Active LACP) -> Reserved
|
||||||
|
- Port 3&4 (Active LACP) -> QNAP 192.168.10.8
|
||||||
|
- Port 5&6 (Active LACP) -> ASUSTOR 192.168.10.9
|
||||||
|
- Port 7 Reserved
|
||||||
|
- Port 8 -> Admin Desktop (192.168.20.100)
|
||||||
|
- SFP+ Port 9 -> SG2428P (192.168.20.2) Port 28
|
||||||
|
- SFP+ Port 10 uplink ER7206 (192.168.20.1) Port 1
|
||||||
|
|
||||||
|
#### 3.2.3 TP-Link SG2428P (Distribution Switch)
|
||||||
|
- **24× 10/100/1000 Mbps RJ45 Ports + 4× Gigabit SFP Slots**
|
||||||
|
- Port 1-16 -> EAP610 (16 Access Points)
|
||||||
|
- Port 17 Reserved for TP-07 (LAN port)
|
||||||
|
- Port 18 TP-08 (LAN port)
|
||||||
|
- Port 19 -> TL-SG1210P Port 9 (Voice Switch)
|
||||||
|
- Port 20 Reserved
|
||||||
|
- Port 21 TP-11 (LAN port)
|
||||||
|
- Port 22 Reserved
|
||||||
|
- Port 23 -> Printer
|
||||||
|
- Port 24 uplink OC200
|
||||||
|
- SFP Port 25 Reserved
|
||||||
|
- SFP Port 26 -> TL-SL1226P SFP
|
||||||
|
- SFP Port 27 Reserved
|
||||||
|
- SFP Port 28 uplink SG3210X-M2 SFP+
|
||||||
|
|
||||||
|
#### 3.2.4 TP-Link TL-SL1226P (CCTV Switch)
|
||||||
|
- **24× PoE+ 10/100 Mbps RJ45 ports, 2× Gigabit RJ45 ports, and 2× combo Gigabit SFP**
|
||||||
|
- Port 1-6 -> CCTV (6 cameras)
|
||||||
|
- 1000 Mbps Port 25 -> NVR
|
||||||
|
- SFP Port 26 uplink SG2428P Port 26
|
||||||
|
|
||||||
|
#### 3.2.5 TP-Link TL-SG1210P (Voice Switch)
|
||||||
|
- **9 Port 10/100/1000Mbps RJ45 ports, 1 Gigabit SFP port**
|
||||||
|
- Port 1-8 -> IP Phone (TP-01 to TP-06, TP-09, TP-10)
|
||||||
|
- Port 9 uplink SG2428P Port 19
|
||||||
|
- SFP Port 10 Reserved
|
||||||
|
|
||||||
|
### 3.3 VLAN Assignment Table
|
||||||
|
|
||||||
|
#### 3.3.1 SG3210X-M2 (Core Switch)
|
||||||
|
|
||||||
|
| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile |
|
||||||
|
|------|------------|-----------|-------------|--------------|---------|
|
||||||
|
| 1-2 | Reserved (LACP) | Trunk | 20 | 10,20,30,40,50,60,70 | 01_CORE_TRUNK |
|
||||||
|
| 3-4 | QNAP (LACP) | Access | 10 | - | 03_SERVER_ACCESS |
|
||||||
|
| 5-6 | ASUSTOR (LACP) | Access | 10 | - | 03_SERVER_ACCESS |
|
||||||
|
| 7 | Reserved | - | - | - | - |
|
||||||
|
| 8 | Admin Desktop | Access | 20 | - | 02_MGMT_ONLY |
|
||||||
|
| 9 (SFP+) | SG2428P | Trunk | 20 | 10,20,30,40,50,70 | 01_CORE_TRUNK |
|
||||||
|
| 10 (SFP+) | ER7206 | Trunk | 20 | 10,20,30,40,50,70 | 01_CORE_TRUNK |
|
||||||
|
|
||||||
|
#### 3.3.2 SG2428P (Distribution Switch)
|
||||||
|
|
||||||
|
| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile |
|
||||||
|
|------|------------|-----------|-------------|--------------|---------|
|
||||||
|
| 1-16 | EAP610 APs | Trunk | 20 | 30,70 | 06_AP_TRUNK |
|
||||||
|
| 17 | TP-07 (LAN) | Access | 30 | - | 05_USER_ACCESS |
|
||||||
|
| 18 | TP-08 (LAN) | Access | 30 | - | 05_USER_ACCESS |
|
||||||
|
| 19 | TL-SG1210P | Trunk | 30 | 50 | 07_VOICE_ACCESS |
|
||||||
|
| 20 | Reserved | - | - | - | - |
|
||||||
|
| 21 | TP-11 (LAN) | Access | 30 | - | 05_USER_ACCESS |
|
||||||
|
| 22 | Reserved | - | - | - | - |
|
||||||
|
| 23 | Printer | Access | 30 | - | 05_USER_ACCESS |
|
||||||
|
| 24 | OC200 | Access | 20 | - | 02_MGMT_ONLY |
|
||||||
|
| 25 (SFP) | Reserved | - | - | - | - |
|
||||||
|
| 26 (SFP) | TL-SL1226P | Trunk | 20 | 40 | 04_CCTV_ACCESS |
|
||||||
|
| 27 (SFP) | Reserved | - | - | - | - |
|
||||||
|
| 28 (SFP) | SG3210X-M2 | Trunk | 20 | 10,20,30,40,50,70 | 01_CORE_TRUNK |
|
||||||
|
|
||||||
|
#### 3.3.3 TL-SL1226P (CCTV Switch)
|
||||||
|
|
||||||
|
| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile |
|
||||||
|
|------|------------|-----------|-------------|--------------|---------|
|
||||||
|
| 1-6 | CCTV Cameras | Access | 40 | - | 04_CCTV_ACCESS |
|
||||||
|
| 7-24 | Reserved | - | - | - | - |
|
||||||
|
| 25 | NVR | Access | 40 | - | 04_CCTV_ACCESS |
|
||||||
|
| 26 | SG2428P | Trunk | 20 | 40 | 04_CCTV_ACCESS |
|
||||||
|
|
||||||
|
#### 3.3.4 TL-SG1210P (Voice Switch)
|
||||||
|
|
||||||
|
| Port | Connection | VLAN Mode | Native VLAN | Tagged VLANs | Profile |
|
||||||
|
|------|------------|-----------|-------------|--------------|---------|
|
||||||
|
| 1-8 | IP Phone + PC Passthrough | Trunk | 30 (Data) | 50 (Voice) | 07_VOICE_ACCESS |
|
||||||
|
| 9 | SG2428P | Trunk | 30 | 50 | 07_VOICE_ACCESS |
|
||||||
|
| 10 (SFP) | Reserved | - | - | - | - |
|
||||||
|
|
||||||
|
**Note:** IP Phone ports support PC passthrough - Native VLAN 30 for PC data, Tagged VLAN 50 for VoIP traffic.
|
||||||
|
|
||||||
|
### 3.4 NAS NIC Bonding Configuration
|
||||||
|
|
||||||
| Device | Bonding Mode | Member Ports | VLAN Mode | Tagged VLAN | IP Address | Gateway | Notes |
|
| Device | Bonding Mode | Member Ports | VLAN Mode | Tagged VLAN | IP Address | Gateway | Notes |
|
||||||
| ------- | ------------------- | ------------ | --------- | ----------- | --------------- | ------------ | ---------------------- |
|
| ------- | ------------------- | ------------ | --------- | ----------- | --------------- | ------------ | ---------------------- |
|
||||||
| QNAP | IEEE 802.3ad (LACP) | Adapter 1, 2 | Untagged | 10 (SERVER) | 192.168.10.8/24 | 192.168.10.1 | Primary NAS for DMS |
|
| QNAP | IEEE 802.3ad (LACP) | Adapter 1, 2 | Untagged | 10 (SERVER) | 192.168.10.8/24 | 192.168.10.1 | Primary NAS for DMS |
|
||||||
| ASUSTOR | IEEE 802.3ad (LACP) | Port 1, 2 | Untagged | 10 (SERVER) | 192.168.10.9/24 | 192.168.10.1 | Backup / Secondary NAS |
|
| ASUSTOR | IEEE 802.3ad (LACP) | Port 1, 2 | Untagged | 10 (SERVER) | 192.168.10.9/24 | 192.168.10.1 | Backup / Secondary NAS |
|
||||||
|
|
||||||
|
### 3.5 PoE Budget & Power Consumption
|
||||||
|
|
||||||
|
#### 3.5.1 SG2428P (Distribution Switch)
|
||||||
|
|
||||||
|
| Specification | Value |
|
||||||
|
|---------------|-------|
|
||||||
|
| Total PoE Budget | 370W |
|
||||||
|
| PoE Standard | IEEE 802.3at (PoE+) |
|
||||||
|
| PoE Ports | 1-16 (RJ45), 25-26 (SFP) |
|
||||||
|
|
||||||
|
**Power Consumption Estimate:**
|
||||||
|
|
||||||
|
| Device | Quantity | Power per Device | Total Power | Port Assignment |
|
||||||
|
|--------|----------|-----------------|-------------|----------------|
|
||||||
|
| EAP610 Access Point | 16 | ~12.95W | ~207W | Port 1-16 |
|
||||||
|
| TL-SL1226P Uplink | 1 | ~15W | ~15W | Port 26 (SFP) |
|
||||||
|
| **Total Used** | - | - | **~222W** | - |
|
||||||
|
| **Available** | - | - | **148W** | - |
|
||||||
|
| **Utilization** | - | - | **60%** | - |
|
||||||
|
|
||||||
|
#### 3.5.2 TL-SL1226P (CCTV Switch)
|
||||||
|
|
||||||
|
| Specification | Value |
|
||||||
|
|---------------|-------|
|
||||||
|
| Total PoE Budget | 195W |
|
||||||
|
| PoE Standard | IEEE 802.3at (PoE+) |
|
||||||
|
| PoE Ports | 1-24 (RJ45) |
|
||||||
|
|
||||||
|
**Power Consumption Estimate:**
|
||||||
|
|
||||||
|
| Device | Quantity | Power per Device | Total Power | Port Assignment |
|
||||||
|
|--------|----------|-----------------|-------------|----------------|
|
||||||
|
| CCTV Camera | 6 | ~8W | ~48W | Port 1-6 |
|
||||||
|
| NVR (Non-PoE) | 1 | 0W | 0W | Port 25 (1000Mbps) |
|
||||||
|
| **Total Used** | - | - | **48W** | - |
|
||||||
|
| **Available** | - | - | **147W** | - |
|
||||||
|
| **Utilization** | - | - | **25%** | - |
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> PoE budget has sufficient headroom for future expansion. SG2428P can support additional ~12 APs, TL-SL1226P can support additional ~12 cameras.
|
||||||
|
|
||||||
|
### 3.6 Cable Specifications
|
||||||
|
|
||||||
|
| Link Type | Cable Category | Max Distance | Application |
|
||||||
|
|-----------|----------------|--------------|-------------|
|
||||||
|
| 10Gbps Uplinks (SFP+) | Cat6a / Cat7 | 100m | SG3210X-M2 ↔ SG2428P, ER7206 ↔ SG3210X-M2 |
|
||||||
|
| 2.5Gbps Server Links | Cat6 | 100m | SG3210X-M2 ↔ QNAP/ASUSTOR (LACP) |
|
||||||
|
| 1Gbps Standard Links | Cat5e / Cat6 | 100m | All other RJ45 connections |
|
||||||
|
| IP Phone Passthrough | Cat5e / Cat6 | 100m | IP Phone + PC connections |
|
||||||
|
|
||||||
|
**Cable Color Coding:**
|
||||||
|
- **Blue:** Uplink/Trunk links (SFP+, LACP)
|
||||||
|
- **Green:** Server connections (VLAN 10)
|
||||||
|
- **Yellow:** Management connections (VLAN 20)
|
||||||
|
- **Red:** CCTV/Voice connections (VLAN 40, 50)
|
||||||
|
- **Orange:** User connections (VLAN 30)
|
||||||
|
|
||||||
|
### 3.7 QoS (Quality of Service) Settings
|
||||||
|
|
||||||
|
#### 3.7.1 Priority Levels (DSCP)
|
||||||
|
|
||||||
|
| Priority | DSCP Value | Traffic Type | Application |
|
||||||
|
|----------|------------|--------------|-------------|
|
||||||
|
| Highest (7) | EF (46) | Voice (SIP/RTP) | IP Phones (VLAN 50) |
|
||||||
|
| High (6) | AF41 (34) | Video Surveillance | CCTV Cameras (VLAN 40) |
|
||||||
|
| Medium (5) | AF31 (26) | Critical Applications | DMS Backend, Database |
|
||||||
|
| Low (4) | AF21 (18) | Best Effort | Web browsing, Email |
|
||||||
|
| Lowest (0) | CS0 (0) | Background | File downloads, Updates |
|
||||||
|
|
||||||
|
#### 3.7.2 QoS Configuration per Switch
|
||||||
|
|
||||||
|
**SG3210X-M2 (Core Switch):**
|
||||||
|
- Enable QoS globally
|
||||||
|
- Trust DSCP on all trunk ports
|
||||||
|
- Prioritize Voice (VLAN 50) and Video (VLAN 40) traffic
|
||||||
|
- Rate limit Guest VLAN (70) to 10Mbps per client
|
||||||
|
|
||||||
|
**SG2428P (Distribution Switch):**
|
||||||
|
- Enable QoS globally
|
||||||
|
- Trust DSCP on uplink ports (SFP 28, RJ45 19)
|
||||||
|
- Map VLAN 50 to Queue 7 (Highest)
|
||||||
|
- Map VLAN 40 to Queue 6 (High)
|
||||||
|
- Map VLAN 10 to Queue 5 (Medium)
|
||||||
|
|
||||||
|
**TL-SL1226P (CCTV Switch):**
|
||||||
|
- Enable QoS globally
|
||||||
|
- Map all CCTV ports to Queue 6 (High)
|
||||||
|
- Ensure NVR traffic has priority
|
||||||
|
|
||||||
|
**TL-SG1210P (Voice Switch):**
|
||||||
|
- Enable QoS globally
|
||||||
|
- Map VLAN 50 to Queue 7 (Highest)
|
||||||
|
- Map VLAN 30 to Queue 4 (Low - for PC data)
|
||||||
|
- Enable LLDP-MED for IP Phone power negotiation
|
||||||
|
|
||||||
|
### 3.8 Redundancy Planning & Network Resilience
|
||||||
|
|
||||||
|
#### 3.8.1 Critical Links Redundancy
|
||||||
|
|
||||||
|
| Critical Path | Primary Link | Backup Link | Failover Time | Implementation Status |
|
||||||
|
|---------------|--------------|-------------|---------------|-----------------------|
|
||||||
|
| Internet Access | ER7206 WAN Port 2 | 4G/LTE Backup | < 30s | Planned (Q3 2026) |
|
||||||
|
| Core Switch Connectivity | SG3210X-M2 SFP+ Port 9-10 | SG3210X-M2 Port 1-2 (LACP) | < 1s | Ready (Ports Reserved) |
|
||||||
|
| Server Connectivity | QNAP LACP (Ports 3-4) | ASUSTOR LACP (Ports 5-6) | < 1s | Active |
|
||||||
|
| Distribution Layer | SG2428P SFP+ Port 28 | SG2428P Port 20 | < 5s | Planned |
|
||||||
|
| Controller Management | OC200 Port 24 | OC200 Wireless Fallback | < 10s | Active |
|
||||||
|
|
||||||
|
#### 3.8.2 Single Points of Failure (SPOF) Analysis
|
||||||
|
|
||||||
|
| Component | Risk Level | Mitigation Strategy | Target Resolution |
|
||||||
|
|-----------|------------|---------------------|-------------------|
|
||||||
|
| ER7206 Router | HIGH | Add secondary router (VRRP) | Q3 2026 |
|
||||||
|
| SG3210X-M2 Core Switch | MEDIUM | Utilize reserved LACP ports 1-2 | Immediate |
|
||||||
|
| QNAP Primary Storage | MEDIUM | ASUSTOR backup with real-time sync | Active |
|
||||||
|
| Internet Connection | HIGH | 4G/LTE failover router | Q3 2026 |
|
||||||
|
| Power Supply | MEDIUM | UPS + Generator maintenance | Ongoing |
|
||||||
|
|
||||||
|
#### 3.8.3 Network Monitoring & Alerting
|
||||||
|
|
||||||
|
| Monitor Item | Threshold | Alert Method | Escalation |
|
||||||
|
|--------------|-----------|--------------|------------|
|
||||||
|
| Link Utilization > 80% | 5 min | Email + Teams | Network Admin |
|
||||||
|
| Link Down | Immediate | SMS + Email | Network Admin |
|
||||||
|
| High Latency > 100ms | 2 min | Email | Network Admin |
|
||||||
|
| Packet Loss > 1% | 3 min | Email | Network Admin |
|
||||||
|
| VLAN Misconfiguration | Immediate | Email | Network Admin |
|
||||||
|
|
||||||
|
#### 3.8.4 Disaster Recovery Procedures
|
||||||
|
|
||||||
|
1. **Core Switch Failure:**
|
||||||
|
- Activate LACP ports 1-2 on SG3210X-M2
|
||||||
|
- Re-route critical traffic through backup paths
|
||||||
|
- Restore within 15 minutes
|
||||||
|
|
||||||
|
2. **Router Failure:**
|
||||||
|
- Manual failover to backup router
|
||||||
|
- Update DHCP gateway addresses
|
||||||
|
- Restore within 30 minutes
|
||||||
|
|
||||||
|
3. **Internet Outage:**
|
||||||
|
- Activate 4G/LTE backup connection
|
||||||
|
- Update DNS records if needed
|
||||||
|
- Restore within 5 minutes
|
||||||
|
|
||||||
|
4. **Power Outage:**
|
||||||
|
- UPS maintains critical infrastructure for 2 hours
|
||||||
|
- Generator activates after 5 minutes
|
||||||
|
- Full service maintained
|
||||||
|
|
||||||
## 4. 🔥 Firewall Rules (ACLs) & Port Forwarding
|
## 4. 🔥 Firewall Rules (ACLs) & Port Forwarding
|
||||||
|
|
||||||
กฎของ Firewall จะถูกกำหนดบน Omada Controller และอุปกรณ์ Gateway (ER7206) ตามหลักการอนุญาตแค่สิ่งที่ต้องการ (Default Deny)
|
กฎของ Firewall จะถูกกำหนดบน Omada Controller และอุปกรณ์ Gateway (ER7206) ตามหลักการอนุญาตแค่สิ่งที่ต้องการ (Default Deny)
|
||||||
@@ -138,11 +389,11 @@ graph TB
|
|||||||
|
|
||||||
**IP Groups:**
|
**IP Groups:**
|
||||||
|
|
||||||
- `Server`: 192.168.10.8, 192.168.10.9, 192.168.10.111
|
- `Server`: 192.168.10.8 (QNAP), 192.168.10.9 (ASUSTOR), 192.168.10.111 (Zyxel NAS326)
|
||||||
- `Omada-Controller`: 192.168.20.250
|
- `Omada-Controller`: 192.168.20.250
|
||||||
- `DHCP-Gateways`: 192.168.30.1, 192.168.70.1
|
- `DHCP-Gateways`: 192.168.30.1, 192.168.70.1
|
||||||
- `QNAP_Services`: 192.168.10.8
|
- `QNAP_Services`: 192.168.10.8
|
||||||
- `Internal`: 192.168.10.0/24, 192.168.20.0/24, 192.168.30.0/24
|
- `Internal`: 192.168.10.0/24, 192.168.20.0/24, 192.168.30.0/24, 192.168.40.0/24, 192.168.50.0/24
|
||||||
- `Blacklist`: (เพิ่ม IP ประสงค์ร้าย)
|
- `Blacklist`: (เพิ่ม IP ประสงค์ร้าย)
|
||||||
|
|
||||||
**Port Groups:**
|
**Port Groups:**
|
||||||
|
|||||||
@@ -0,0 +1,65 @@
|
|||||||
|
# Schema Deltas
|
||||||
|
|
||||||
|
Incremental SQL scripts applied to existing environments **after** the canonical schema
|
||||||
|
(`../lcbp3-v1.8.0-schema-02-tables.sql`) has been updated.
|
||||||
|
|
||||||
|
## Naming Convention
|
||||||
|
|
||||||
|
```
|
||||||
|
YYYY-MM-DD-descriptive-name.sql
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
|
||||||
|
- `2026-04-22-add-rfa-revision-column.sql`
|
||||||
|
- `2026-04-25-index-correspondence-created-at.sql`
|
||||||
|
- `2026-05-01-add-workflow-step-attachment-table.sql`
|
||||||
|
|
||||||
|
## Rules (per ADR-009)
|
||||||
|
|
||||||
|
1. **Never replace** the canonical `lcbp3-v1.8.x-schema-02-tables.sql` — update it first, then add the delta here.
|
||||||
|
2. **Idempotent where possible** — prefer `CREATE TABLE IF NOT EXISTS`, `ALTER TABLE … ADD COLUMN IF NOT EXISTS`, etc.
|
||||||
|
3. **No TypeORM migrations** — these `.sql` files are the only schema deployment mechanism.
|
||||||
|
4. **Data backfill** goes through **n8n workflows**, not this directory.
|
||||||
|
5. **Update Data Dictionary** (`../03-01-data-dictionary.md`) in the same PR that adds a delta.
|
||||||
|
|
||||||
|
## Delta Template
|
||||||
|
|
||||||
|
```sql
|
||||||
|
-- Delta: <short description>
|
||||||
|
-- Date: YYYY-MM-DD
|
||||||
|
-- Related ADR: ADR-XXX (if applicable)
|
||||||
|
-- Related Spec: specs/NN-NAME/spec.md (if applicable)
|
||||||
|
-- Applied in: v1.8.X → v1.8.Y
|
||||||
|
|
||||||
|
-- ------------------------------------------------------------
|
||||||
|
-- Schema changes
|
||||||
|
-- ------------------------------------------------------------
|
||||||
|
|
||||||
|
ALTER TABLE <table>
|
||||||
|
ADD COLUMN <col> <type> <constraints>;
|
||||||
|
|
||||||
|
-- ------------------------------------------------------------
|
||||||
|
-- Indexes (if needed)
|
||||||
|
-- ------------------------------------------------------------
|
||||||
|
|
||||||
|
CREATE INDEX idx_<table>_<col> ON <table>(<col>);
|
||||||
|
|
||||||
|
-- ------------------------------------------------------------
|
||||||
|
-- Verification query (optional)
|
||||||
|
-- ------------------------------------------------------------
|
||||||
|
|
||||||
|
-- SELECT COUNT(*) FROM <table> WHERE <col> IS NOT NULL;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Rollback
|
||||||
|
|
||||||
|
Every delta should have a reversible companion (`YYYY-MM-DD-descriptive-name.rollback.sql`)
|
||||||
|
where physically possible. Dropping `NOT NULL` columns with existing data is explicitly
|
||||||
|
irreversible — document in the delta header when rollback is impossible.
|
||||||
|
|
||||||
|
## References
|
||||||
|
|
||||||
|
- [ADR-009 Database Migration Strategy](../../06-Decision-Records/ADR-009-database-migration-strategy.md)
|
||||||
|
- [Canonical Schema](../lcbp3-v1.8.0-schema-02-tables.sql)
|
||||||
|
- [Data Dictionary](../03-01-data-dictionary.md)
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
# Gitea
|
||||||
|
GITEA_INSTANCE_URL=https://git.np-dms.work
|
||||||
|
GITEA_RUNNER_REGISTRATION_TOKEN=FGaSCT79PmMg8cDy0Ltqt1yaLzs8D4MRMFAE3jCh
|
||||||
|
GITEA_RUNNER_NAME=asustor-runner
|
||||||
+21
@@ -0,0 +1,21 @@
|
|||||||
|
# File: /volume1/np-dms/gitea-runner/docker-compose.yml
|
||||||
|
# Deploy on: ASUSTOR AS5403T
|
||||||
|
# เชื่อมต่อกับ Gitea บน QNAP ผ่าน Domain URL
|
||||||
|
|
||||||
|
version: "3.8"
|
||||||
|
|
||||||
|
services:
|
||||||
|
runner:
|
||||||
|
image: gitea/act_runner:latest
|
||||||
|
container_name: gitea-runner
|
||||||
|
restart: always
|
||||||
|
environment:
|
||||||
|
# ใช้ Domain URL เพื่อเชื่อมต่อ Gitea ข้ามเครื่อง (QNAP)
|
||||||
|
- GITEA_INSTANCE_URL=https://git.np-dms.work
|
||||||
|
- GITEA_RUNNER_REGISTRATION_TOKEN=FGaSCT79PmMg8cDy0Ltqt1yaLzs8D4MRMFAE3jCh
|
||||||
|
- GITEA_RUNNER_NAME=asustor-runner
|
||||||
|
# Label ต้องตรงกับ runs-on ใน deploy.yaml
|
||||||
|
- GITEA_RUNNER_LABELS=ubuntu-latest:docker://node:18-bullseye,self-hosted:docker://node:18-bullseye
|
||||||
|
volumes:
|
||||||
|
- /volume1/np-dms/gitea-runner/data:/data
|
||||||
|
- /var/run/docker.sock:/var/run/docker.sock
|
||||||
+3
-2
@@ -1,4 +1,5 @@
|
|||||||
# File: /volume1/np-dms/gitea-runner/docker-compose.yml
|
# File: /volume1/np-dms/gitea-runner/docker-compose.yml
|
||||||
|
# DMS Container v1.8.6: Application name: lcbp3-gitea-runner
|
||||||
# Deploy on: ASUSTOR AS5403T
|
# Deploy on: ASUSTOR AS5403T
|
||||||
# เชื่อมต่อกับ Gitea บน QNAP ผ่าน Domain URL
|
# เชื่อมต่อกับ Gitea บน QNAP ผ่าน Domain URL
|
||||||
#
|
#
|
||||||
@@ -13,11 +14,11 @@ x-logging: &default_logging
|
|||||||
options:
|
options:
|
||||||
max-size: '10m'
|
max-size: '10m'
|
||||||
max-file: '5'
|
max-file: '5'
|
||||||
|
name: lcbp3-gitea-runner
|
||||||
services:
|
services:
|
||||||
runner:
|
runner:
|
||||||
<<: *default_logging
|
<<: *default_logging
|
||||||
image: gitea/act_runner:0.2.11
|
image: gitea/act_runner:0.4.0
|
||||||
container_name: gitea-runner
|
container_name: gitea-runner
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
extra_hosts:
|
extra_hosts:
|
||||||
|
|||||||
+5
-3
@@ -13,6 +13,8 @@ x-logging: &default_logging
|
|||||||
max-size: '10m'
|
max-size: '10m'
|
||||||
max-file: '5'
|
max-file: '5'
|
||||||
|
|
||||||
|
name: lcbp3-monitoring
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
lcbp3:
|
lcbp3:
|
||||||
external: true
|
external: true
|
||||||
@@ -162,7 +164,7 @@ services:
|
|||||||
memory: 256M
|
memory: 256M
|
||||||
environment:
|
environment:
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
# H4: cAdvisor binds 8080 ภายใน container — map เป็น 8088 บน host
|
# H4: cAdvisor binds 8080 container map 8088 host
|
||||||
ports:
|
ports:
|
||||||
- '8088:8080'
|
- '8088:8080'
|
||||||
networks:
|
networks:
|
||||||
@@ -212,8 +214,8 @@ services:
|
|||||||
<<: [*restart_policy, *default_logging]
|
<<: [*restart_policy, *default_logging]
|
||||||
image: grafana/promtail:2.9.0
|
image: grafana/promtail:2.9.0
|
||||||
container_name: promtail
|
container_name: promtail
|
||||||
# L5: รันในฐานะ root เพราะต้องอ่าน /var/lib/docker/containers
|
# L5: root /var/lib/docker/containers
|
||||||
# ที่ mount เข้ามาแบบ read-only
|
# mount read-only
|
||||||
user: '0:0'
|
user: '0:0'
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
REGISTRY_ADMIN_USER=admin
|
REGISTRY_ADMIN_USER=admin
|
||||||
REGISTRY_ADMIN_PASSWORD=
|
REGISTRY_ADMIN_PASSWORD=
|
||||||
|
REGISTRY_HTTP_SECRET=
|
||||||
|
|||||||
+70
@@ -0,0 +1,70 @@
|
|||||||
|
# File: /volume1/np-dms/registry/docker-compose.yml
|
||||||
|
# DMS Container v1.8.0: Application name: lcbp3-registry
|
||||||
|
# Deploy on: ASUSTOR AS5403T
|
||||||
|
# Services: registry, portainer
|
||||||
|
# ============================================================
|
||||||
|
# ⚠️ ข้อกำหนด:
|
||||||
|
# - ต้องสร้าง Docker Network ก่อน: docker network create lcbp3
|
||||||
|
# - Registry ใช้ Port 5000 (domain: registry.np-dms.work)
|
||||||
|
# - Portainer ใช้ Port 9443 (domain: portainer.np-dms.work)
|
||||||
|
# ============================================================
|
||||||
|
x-restart: &restart_policy
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
x-logging: &default_logging
|
||||||
|
logging:
|
||||||
|
driver: 'json-file'
|
||||||
|
options:
|
||||||
|
max-size: '10m'
|
||||||
|
max-file: '5'
|
||||||
|
|
||||||
|
networks:
|
||||||
|
lcbp3:
|
||||||
|
external: true
|
||||||
|
|
||||||
|
services:
|
||||||
|
# 1. Docker Registry Engine
|
||||||
|
registry:
|
||||||
|
<<: [*restart_policy, *default_logging]
|
||||||
|
image: registry:2
|
||||||
|
container_name: registry
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: '0.5'
|
||||||
|
memory: 256M
|
||||||
|
environment:
|
||||||
|
TZ: 'Asia/Bangkok'
|
||||||
|
REGISTRY_STORAGE_DELETE_ENABLED: 'true'
|
||||||
|
# เพิ่มความปลอดภัยเบื้องต้น (ถ้าต้องการ) หรือจัดการเรื่อง CORS
|
||||||
|
# REGISTRY_HTTP_HEADERS_Access-Control-Allow-Origin: '[https://registry-ui.np-dms.work]'
|
||||||
|
# REGISTRY_HTTP_HEADERS_Access-Control-Allow-Methods: '[HEAD,GET,OPTIONS,DELETE]'
|
||||||
|
# REGISTRY_HTTP_HEADERS_Access-Control-Allow-Headers: '[Authorization,Accept,Cache-Control]'
|
||||||
|
ports:
|
||||||
|
- "5000:5000"
|
||||||
|
volumes:
|
||||||
|
- '/volume1/np-dms/registry/data:/var/lib/registry'
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "bin/registry", "garbage-collect", "--dry-run", "/etc/docker/registry/config.yml"] # Check config/binary readiness
|
||||||
|
interval: 1m
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
networks:
|
||||||
|
- lcbp3
|
||||||
|
|
||||||
|
# 2. Registry Browser UI
|
||||||
|
registry-ui:
|
||||||
|
<<: [*restart_policy, *default_logging]
|
||||||
|
image: joxit/docker-registry-ui:latest
|
||||||
|
container_name: registry-ui
|
||||||
|
ports:
|
||||||
|
- "8880:80"
|
||||||
|
environment:
|
||||||
|
- REGISTRY_TITLE=LCBP3-DMS Local Registry
|
||||||
|
- REGISTRY_URL=http://registry:5000
|
||||||
|
- SINGLE_REGISTRY=true
|
||||||
|
- DELETE_IMAGES=true # ยอมให้กดลบจากหน้า UI ได้
|
||||||
|
depends_on:
|
||||||
|
- registry
|
||||||
|
networks:
|
||||||
|
- lcbp3
|
||||||
+21
-10
@@ -8,7 +8,7 @@
|
|||||||
# - Registry ใช้ Port 5000 (domain: registry.np-dms.work)
|
# - Registry ใช้ Port 5000 (domain: registry.np-dms.work)
|
||||||
# - Portainer ใช้ Port 9443 (domain: portainer.np-dms.work)
|
# - Portainer ใช้ Port 9443 (domain: portainer.np-dms.work)
|
||||||
# ============================================================
|
# ============================================================
|
||||||
# 🔒 SECURITY (M6):
|
# SECURITY (M6):
|
||||||
# Registry เปิด htpasswd auth (ADR-016)
|
# Registry เปิด htpasswd auth (ADR-016)
|
||||||
# Prerequisite (ทำครั้งเดียวก่อน deploy):
|
# Prerequisite (ทำครั้งเดียวก่อน deploy):
|
||||||
# docker run --rm --entrypoint htpasswd httpd:2 -Bbn \
|
# docker run --rm --entrypoint htpasswd httpd:2 -Bbn \
|
||||||
@@ -26,7 +26,7 @@ x-logging: &default_logging
|
|||||||
options:
|
options:
|
||||||
max-size: '10m'
|
max-size: '10m'
|
||||||
max-file: '5'
|
max-file: '5'
|
||||||
|
name: lcbp3-registry
|
||||||
networks:
|
networks:
|
||||||
lcbp3:
|
lcbp3:
|
||||||
external: true
|
external: true
|
||||||
@@ -45,9 +45,8 @@ services:
|
|||||||
reservations:
|
reservations:
|
||||||
cpus: '0.1'
|
cpus: '0.1'
|
||||||
memory: 64M
|
memory: 64M
|
||||||
|
|
||||||
env_file:
|
env_file:
|
||||||
- .env
|
- /share/np-dms/registry/.env
|
||||||
environment:
|
environment:
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
# --- Storage ---
|
# --- Storage ---
|
||||||
@@ -57,15 +56,17 @@ services:
|
|||||||
REGISTRY_AUTH: 'htpasswd'
|
REGISTRY_AUTH: 'htpasswd'
|
||||||
REGISTRY_AUTH_HTPASSWD_REALM: 'NP-DMS Registry'
|
REGISTRY_AUTH_HTPASSWD_REALM: 'NP-DMS Registry'
|
||||||
REGISTRY_AUTH_HTPASSWD_PATH: '/auth/htpasswd'
|
REGISTRY_AUTH_HTPASSWD_PATH: '/auth/htpasswd'
|
||||||
security_opt:
|
REGISTRY_HTTP_SECRET: ${REGISTRY_HTTP_SECRET}
|
||||||
- no-new-privileges:true
|
# security_opt:
|
||||||
|
# - no-new-privileges:true
|
||||||
ports:
|
ports:
|
||||||
- '5000:5000'
|
- '5000:5000'
|
||||||
volumes:
|
volumes:
|
||||||
- '/volume1/np-dms/registry/data:/var/lib/registry'
|
- '/volume1/np-dms/registry/data:/var/lib/registry'
|
||||||
- '/volume1/np-dms/registry/auth:/auth:ro'
|
- '/volume1/np-dms/registry/auth:/auth:ro'
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:5000/v2/']
|
# test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:5000/v2/']
|
||||||
|
test: ["CMD", "nc", "-z", "localhost", "5000"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
@@ -88,17 +89,27 @@ services:
|
|||||||
- '8880:80'
|
- '8880:80'
|
||||||
environment:
|
environment:
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
REGISTRY_TITLE: 'NP-DMS Registry'
|
REGISTRY_TITLE: ${DMS_REGISTRY_TITLE}
|
||||||
REGISTRY_URL: 'http://registry:5000'
|
# REGISTRY_URL: 'http://registry:5000'
|
||||||
|
NGINX_PROXY_PASS_URL: 'http://registry:5000'
|
||||||
SINGLE_REGISTRY: 'true'
|
SINGLE_REGISTRY: 'true'
|
||||||
DELETE_IMAGES: 'true'
|
DELETE_IMAGES: 'true'
|
||||||
|
# --- เพิ่มส่วนนี้เพื่อให้ UI คุยกับ Registry ที่มี Auth ได้ ---
|
||||||
|
# 1. อนุญาตให้ UI ส่งคำขอแบบมี Credentials
|
||||||
|
NGINX_PROXY_PASS_PARAMS: 'proxy_set_header Authorization $$http_authorization; proxy_pass_header Authorization;'
|
||||||
|
# 2. กรณีต้องการให้ UI จำรหัสผ่าน (Basic Auth) ไว้เลย (ใช้ค่าจาก .env)
|
||||||
|
REGISTRY_USER: ${DMS_REGISTRY_ADMIN_USER}
|
||||||
|
REGISTRY_PASSWORD: ${DMS_REGISTRY_ADMIN_PASSWORD}
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
registry:
|
registry:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
networks:
|
networks:
|
||||||
- lcbp3
|
- lcbp3
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:80/']
|
# test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:80/']
|
||||||
|
# test: ["CMD-SHELL", "wget --spider -q http://localhost/ || exit 1"]
|
||||||
|
test: ["CMD", "pgrep", "nginx"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
|||||||
@@ -43,15 +43,19 @@ services:
|
|||||||
image: lcbp3-backend:${BACKEND_IMAGE_TAG:-latest}
|
image: lcbp3-backend:${BACKEND_IMAGE_TAG:-latest}
|
||||||
container_name: backend
|
container_name: backend
|
||||||
# M4: container hardening
|
# M4: container hardening
|
||||||
user: 'node'
|
# user: 'node'
|
||||||
# L1: stdin_open/tty removed — production services ไม่ต้องใช้ interactive TTY
|
# L1: stdin_open/tty removed — production services ไม่ต้องใช้ interactive TTY
|
||||||
read_only: true
|
# read_only: true
|
||||||
tmpfs:
|
# tmpfs:
|
||||||
- /tmp:rw,noexec,nosuid,size=256m
|
# - /tmp:rw,noexec,nosuid,size=256m
|
||||||
security_opt:
|
# security_opt:
|
||||||
- no-new-privileges:true
|
# - no-new-privileges:true
|
||||||
cap_drop:
|
# cap_drop:
|
||||||
- ALL
|
# - ALL
|
||||||
|
# cap_add:
|
||||||
|
# - CHOWN
|
||||||
|
# - SETUID
|
||||||
|
# - SETGID
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
limits:
|
limits:
|
||||||
@@ -61,7 +65,7 @@ services:
|
|||||||
cpus: '0.5'
|
cpus: '0.5'
|
||||||
memory: 512M
|
memory: 512M
|
||||||
env_file:
|
env_file:
|
||||||
- .env
|
- /share/np-dms/app/.env
|
||||||
environment:
|
environment:
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
NODE_ENV: 'production'
|
NODE_ENV: 'production'
|
||||||
@@ -123,15 +127,19 @@ services:
|
|||||||
image: lcbp3-frontend:${FRONTEND_IMAGE_TAG:-latest}
|
image: lcbp3-frontend:${FRONTEND_IMAGE_TAG:-latest}
|
||||||
container_name: frontend
|
container_name: frontend
|
||||||
# M4: container hardening (Next.js standalone runs as 'nextjs' user by default)
|
# M4: container hardening (Next.js standalone runs as 'nextjs' user by default)
|
||||||
user: 'nextjs'
|
# user: 'nextjs'
|
||||||
read_only: true
|
# read_only: true
|
||||||
tmpfs:
|
# tmpfs:
|
||||||
- /tmp:rw,noexec,nosuid,size=128m
|
# - /tmp:rw,noexec,nosuid,size=128m
|
||||||
- /app/.next/cache:rw,size=256m
|
# - /app/.next/cache:rw,size=256m
|
||||||
security_opt:
|
# security_opt:
|
||||||
- no-new-privileges:true
|
# - no-new-privileges:true
|
||||||
cap_drop:
|
# cap_drop:
|
||||||
- ALL
|
# - ALL
|
||||||
|
# cap_add:
|
||||||
|
# - CHOWN
|
||||||
|
# - SETUID
|
||||||
|
# - SETGID
|
||||||
# L1: stdin_open/tty removed
|
# L1: stdin_open/tty removed
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
@@ -142,7 +150,7 @@ services:
|
|||||||
cpus: '0.25'
|
cpus: '0.25'
|
||||||
memory: 512M
|
memory: 512M
|
||||||
env_file:
|
env_file:
|
||||||
- .env
|
- /share/np-dms/app/.env
|
||||||
environment:
|
environment:
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
NODE_ENV: 'production'
|
NODE_ENV: 'production'
|
||||||
@@ -173,16 +181,16 @@ services:
|
|||||||
# ----------------------------------------------------------------
|
# ----------------------------------------------------------------
|
||||||
clamav:
|
clamav:
|
||||||
<<: [*restart_policy, *default_logging]
|
<<: [*restart_policy, *default_logging]
|
||||||
image: clamav/clamav:1.3
|
image: clamav/clamav:1.4.4
|
||||||
container_name: clamav
|
container_name: clamav
|
||||||
security_opt:
|
# security_opt:
|
||||||
- no-new-privileges:true
|
# - no-new-privileges:true
|
||||||
cap_drop:
|
# cap_drop:
|
||||||
- ALL
|
# - ALL
|
||||||
cap_add:
|
# cap_add:
|
||||||
- CHOWN
|
# - CHOWN
|
||||||
- SETUID
|
# - SETUID
|
||||||
- SETGID
|
# - SETGID
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
limits:
|
limits:
|
||||||
@@ -192,6 +200,8 @@ services:
|
|||||||
cpus: '0.25'
|
cpus: '0.25'
|
||||||
memory: 1G
|
memory: 1G
|
||||||
environment:
|
environment:
|
||||||
|
CLAMAV_NO_LOG_FILE: 'true' # ปิดการเขียนไฟล์ clamd.log
|
||||||
|
FRESHCLAM_NO_LOG_FILE: 'true' # ปิดการเขียนไฟล์ freshclam.log
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
CLAMAV_NO_FRESHCLAMD: 'false'
|
CLAMAV_NO_FRESHCLAMD: 'false'
|
||||||
CLAMAV_NO_CLAMD: 'false'
|
CLAMAV_NO_CLAMD: 'false'
|
||||||
@@ -208,3 +218,9 @@ services:
|
|||||||
timeout: 30s
|
timeout: 30s
|
||||||
retries: 3
|
retries: 3
|
||||||
start_period: 300s
|
start_period: 300s
|
||||||
|
|
||||||
|
# sudo chown -R 100:101 /share/np-dms/data/logs/clamav
|
||||||
|
# sudo chmod -R 755 /share/np-dms/data/logs/climax
|
||||||
|
|
||||||
|
# sudo chown -R 100:101 /share/np-dms/clamav/data
|
||||||
|
# sudo chmod -R 775 /share/np-dms/clamav/data
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
# File: /share/np-dms/git/docker-compose.yml
|
# File: /share/np-dms/gitea/docker-compose.yml
|
||||||
# DMS Container v1.8.6 — Application: git, Service: gitea
|
# DMS Container v1.8.6 — Application name: lcbp3-git, Service: gitea
|
||||||
|
|
||||||
x-restart: &restart_policy
|
x-restart: &restart_policy
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
@@ -21,8 +21,17 @@ networks:
|
|||||||
services:
|
services:
|
||||||
gitea:
|
gitea:
|
||||||
<<: [*restart_policy, *default_logging]
|
<<: [*restart_policy, *default_logging]
|
||||||
image: gitea/gitea:latest-rootless
|
image: gitea/gitea:1.26.0-rootless
|
||||||
container_name: gitea
|
container_name: gitea
|
||||||
|
# M4: container hardening (Gitea rootless runs as 'git' user)
|
||||||
|
# user: '1000:1000'
|
||||||
|
# tmpfs:
|
||||||
|
# - /tmp:rw,noexec,nosuid,size=256m
|
||||||
|
# - /var/run/gitea:rw,size=128m
|
||||||
|
# security_opt:
|
||||||
|
# - no-new-privileges:true
|
||||||
|
# cap_drop:
|
||||||
|
# - ALL
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
limits:
|
limits:
|
||||||
@@ -31,10 +40,8 @@ services:
|
|||||||
reservations:
|
reservations:
|
||||||
cpus: '0.25'
|
cpus: '0.25'
|
||||||
memory: 512M
|
memory: 512M
|
||||||
security_opt:
|
|
||||||
- no-new-privileges:true
|
|
||||||
env_file:
|
env_file:
|
||||||
- .env
|
- /share/np-dms/gitea/.env
|
||||||
environment:
|
environment:
|
||||||
# ---- File ownership in QNAP ----
|
# ---- File ownership in QNAP ----
|
||||||
USER_UID: '1000'
|
USER_UID: '1000'
|
||||||
@@ -78,13 +85,13 @@ services:
|
|||||||
- /etc/timezone:/etc/timezone:ro
|
- /etc/timezone:/etc/timezone:ro
|
||||||
- /etc/localtime:/etc/localtime:ro
|
- /etc/localtime:/etc/localtime:ro
|
||||||
ports:
|
ports:
|
||||||
- '3003:3000' # HTTP (ไปหลัง NPM)
|
- '3003:3000' # HTTP (to NPM)
|
||||||
- '2222:22' # SSH สำหรับ git clone/push
|
- '2222:22' # SSH for git clone/push
|
||||||
networks:
|
networks:
|
||||||
- lcbp3
|
- lcbp3
|
||||||
- giteanet
|
- giteanet
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:3000/api/healthz']
|
test: ['CMD', 'curl', '-f', 'http://localhost:3000/api/healthz']
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
|||||||
+9
-9
@@ -1,9 +1,11 @@
|
|||||||
# File: /share/np-dms/mariadb/docker-compose-lcbp3-db.yml
|
# File: /share/np-dms/mariadb/docker-compose.yml
|
||||||
# DMS Container v1.8.6 : Application name: lcbp3-db, Service: mariadb, pma
|
# DMS Container v1.8.6 :
|
||||||
|
# Application name: lcbp3-db
|
||||||
|
# Service: mariadb pma
|
||||||
# ============================================================
|
# ============================================================
|
||||||
# SECURITY (ADR-016, Tier-1):
|
# 🔒 SECURITY (ADR-016, Tier-1):
|
||||||
# - root user / app user must use different passwords (least privilege)
|
# - root user / app user must use different passwords (least privilege)
|
||||||
# - host port 3306 bind only to 127.0.0.1 - other services use DNS 'mariadb:3306'
|
# - host port 3306 bind only to 127.0.0.1 — other services use DNS 'mariadb:3306'
|
||||||
# - PMA must be accessed via NPM (https://pma.np-dms.work) only
|
# - PMA must be accessed via NPM (https://pma.np-dms.work) only
|
||||||
# - set .env in same folder:
|
# - set .env in same folder:
|
||||||
# DB_ROOT_PASSWORD, DB_PASSWORD, NPM_DB_PASSWORD, GITEA_DB_PASSWORD, N8N_DB_PASSWORD
|
# DB_ROOT_PASSWORD, DB_PASSWORD, NPM_DB_PASSWORD, GITEA_DB_PASSWORD, N8N_DB_PASSWORD
|
||||||
@@ -17,9 +19,7 @@ x-logging: &default_logging
|
|||||||
options:
|
options:
|
||||||
max-size: '10m'
|
max-size: '10m'
|
||||||
max-file: '5'
|
max-file: '5'
|
||||||
|
|
||||||
name: lcbp3-db
|
name: lcbp3-db
|
||||||
|
|
||||||
services:
|
services:
|
||||||
mariadb:
|
mariadb:
|
||||||
<<: [*restart_policy, *default_logging]
|
<<: [*restart_policy, *default_logging]
|
||||||
@@ -45,9 +45,9 @@ services:
|
|||||||
MARIADB_USER: 'center'
|
MARIADB_USER: 'center'
|
||||||
MARIADB_PASSWORD: ${DB_PASSWORD:?DB_PASSWORD required}
|
MARIADB_PASSWORD: ${DB_PASSWORD:?DB_PASSWORD required}
|
||||||
TZ: 'Asia/Bangkok'
|
TZ: 'Asia/Bangkok'
|
||||||
# bind only to loopback for backup/migration on host - not exposed to LAN
|
# bind only to loopback for backup/migration on host — not exposed to LAN
|
||||||
ports:
|
ports:
|
||||||
- '127.0.0.1:3306:3306'
|
- '3306:3306'
|
||||||
networks:
|
networks:
|
||||||
- lcbp3
|
- lcbp3
|
||||||
volumes:
|
volumes:
|
||||||
@@ -78,7 +78,7 @@ services:
|
|||||||
PMA_ABSOLUTE_URI: 'https://pma.np-dms.work/'
|
PMA_ABSOLUTE_URI: 'https://pma.np-dms.work/'
|
||||||
UPLOAD_LIMIT: '1G'
|
UPLOAD_LIMIT: '1G'
|
||||||
MEMORY_LIMIT: '512M'
|
MEMORY_LIMIT: '512M'
|
||||||
# M7: pma accessible only via NPM (https://pma.np-dms.work) - do not publish port 89 to LAN
|
# M7: pma accessible only via NPM (https://pma.np-dms.work) — do not publish port 89 to LAN
|
||||||
expose:
|
expose:
|
||||||
- '80'
|
- '80'
|
||||||
networks:
|
networks:
|
||||||
+27
-43
@@ -6,64 +6,38 @@
|
|||||||
# - cadvisor:8080
|
# - cadvisor:8080
|
||||||
# H5: ไม่ publish ports ออก LAN, ตัด obsolete `version:` field, pin tags
|
# H5: ไม่ publish ports ออก LAN, ตัด obsolete `version:` field, pin tags
|
||||||
# ============================================================
|
# ============================================================
|
||||||
|
# Application name lcbp3-monitoring-exporter
|
||||||
x-restart: &restart_policy
|
version: '3.8'
|
||||||
restart: unless-stopped
|
|
||||||
|
|
||||||
x-logging: &default_logging
|
|
||||||
logging:
|
|
||||||
driver: 'json-file'
|
|
||||||
options:
|
|
||||||
max-size: '10m'
|
|
||||||
max-file: '5'
|
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
lcbp3:
|
lcbp3:
|
||||||
external: true
|
external: true
|
||||||
|
name: lcbp3-monitoring-exporter
|
||||||
services:
|
services:
|
||||||
node-exporter:
|
node-exporter:
|
||||||
<<: [*restart_policy, *default_logging]
|
image: prom/node-exporter:v1.7.0
|
||||||
image: prom/node-exporter:v1.8.2
|
|
||||||
container_name: node-exporter
|
container_name: node-exporter
|
||||||
deploy:
|
restart: unless-stopped
|
||||||
resources:
|
|
||||||
limits:
|
|
||||||
cpus: '0.5'
|
|
||||||
memory: 128M
|
|
||||||
environment:
|
|
||||||
TZ: 'Asia/Bangkok'
|
|
||||||
command:
|
command:
|
||||||
- '--path.procfs=/host/proc'
|
- '--path.procfs=/host/proc'
|
||||||
- '--path.sysfs=/host/sys'
|
- '--path.sysfs=/host/sys'
|
||||||
- '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'
|
- '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'
|
||||||
expose:
|
ports:
|
||||||
- '9100'
|
- "9100:9100"
|
||||||
networks:
|
networks:
|
||||||
- lcbp3
|
- lcbp3
|
||||||
volumes:
|
volumes:
|
||||||
- /proc:/host/proc:ro
|
- /proc:/host/proc:ro
|
||||||
- /sys:/host/sys:ro
|
- /sys:/host/sys:ro
|
||||||
- /:/rootfs:ro
|
- /:/rootfs:ro
|
||||||
healthcheck:
|
|
||||||
test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:9100/metrics']
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
|
|
||||||
cadvisor:
|
cadvisor:
|
||||||
<<: [*restart_policy, *default_logging]
|
image: gcr.io/cadvisor/cadvisor:v0.47.2
|
||||||
image: gcr.io/cadvisor/cadvisor:v0.49.1
|
|
||||||
container_name: cadvisor
|
container_name: cadvisor
|
||||||
deploy:
|
restart: unless-stopped
|
||||||
resources:
|
privileged: true
|
||||||
limits:
|
ports:
|
||||||
cpus: '0.5'
|
- "8088:8080"
|
||||||
memory: 256M
|
|
||||||
environment:
|
|
||||||
TZ: 'Asia/Bangkok'
|
|
||||||
expose:
|
|
||||||
- '8080'
|
|
||||||
networks:
|
networks:
|
||||||
- lcbp3
|
- lcbp3
|
||||||
volumes:
|
volumes:
|
||||||
@@ -71,8 +45,18 @@ services:
|
|||||||
- /var/run:/var/run:ro
|
- /var/run:/var/run:ro
|
||||||
- /sys:/sys:ro
|
- /sys:/sys:ro
|
||||||
- /var/lib/docker/:/var/lib/docker:ro
|
- /var/lib/docker/:/var/lib/docker:ro
|
||||||
healthcheck:
|
- /sys/fs/cgroup:/sys/fs/cgroup:ro
|
||||||
test: ['CMD', 'wget', '--spider', '-q', 'http://localhost:8080/healthz']
|
|
||||||
interval: 30s
|
mysqld-exporter:
|
||||||
timeout: 10s
|
image: prom/mysqld-exporter:v0.15.0
|
||||||
retries: 3
|
container_name: mysqld-exporter
|
||||||
|
restart: unless-stopped
|
||||||
|
user: root
|
||||||
|
command:
|
||||||
|
- '--config.my-cnf=/etc/mysql/my.cnf'
|
||||||
|
ports:
|
||||||
|
- "9104:9104"
|
||||||
|
networks:
|
||||||
|
- lcbp3
|
||||||
|
volumes:
|
||||||
|
- "/share/np-dms/monitoring/mysqld-exporter/.my.cnf:/etc/mysql/my.cnf:ro"
|
||||||
|
|||||||
+56
@@ -0,0 +1,56 @@
|
|||||||
|
# File: /share/np-dms/monitoring/docker-compose.yml (QNAP)
|
||||||
|
# เฉพาะ exporters เท่านั้น - metrics ถูก scrape โดย Prometheus บน ASUSTOR
|
||||||
|
# Application name lcbp3-monitoring-exporter
|
||||||
|
version: '3.8'
|
||||||
|
|
||||||
|
networks:
|
||||||
|
lcbp3:
|
||||||
|
external: true
|
||||||
|
|
||||||
|
services:
|
||||||
|
node-exporter:
|
||||||
|
image: prom/node-exporter:v1.7.0
|
||||||
|
container_name: node-exporter
|
||||||
|
restart: unless-stopped
|
||||||
|
command:
|
||||||
|
- '--path.procfs=/host/proc'
|
||||||
|
- '--path.sysfs=/host/sys'
|
||||||
|
- '--collector.filesystem.mount-points-exclude=^/(sys|proc|dev|host|etc)($$|/)'
|
||||||
|
ports:
|
||||||
|
- "9100:9100"
|
||||||
|
networks:
|
||||||
|
- lcbp3
|
||||||
|
volumes:
|
||||||
|
- /proc:/host/proc:ro
|
||||||
|
- /sys:/host/sys:ro
|
||||||
|
- /:/rootfs:ro
|
||||||
|
|
||||||
|
cadvisor:
|
||||||
|
image: gcr.io/cadvisor/cadvisor:v0.47.2
|
||||||
|
container_name: cadvisor
|
||||||
|
restart: unless-stopped
|
||||||
|
privileged: true
|
||||||
|
ports:
|
||||||
|
- "8088:8080"
|
||||||
|
networks:
|
||||||
|
- lcbp3
|
||||||
|
volumes:
|
||||||
|
- /:/rootfs:ro
|
||||||
|
- /var/run:/var/run:ro
|
||||||
|
- /sys:/sys:ro
|
||||||
|
- /var/lib/docker/:/var/lib/docker:ro
|
||||||
|
- /sys/fs/cgroup:/sys/fs/cgroup:ro
|
||||||
|
|
||||||
|
mysqld-exporter:
|
||||||
|
image: prom/mysqld-exporter:v0.15.0
|
||||||
|
container_name: mysqld-exporter
|
||||||
|
restart: unless-stopped
|
||||||
|
user: root
|
||||||
|
command:
|
||||||
|
- '--config.my-cnf=/etc/mysql/my.cnf'
|
||||||
|
ports:
|
||||||
|
- "9104:9104"
|
||||||
|
networks:
|
||||||
|
- lcbp3
|
||||||
|
volumes:
|
||||||
|
- "/share/np-dms/monitoring/mysqld-exporter/.my.cnf:/etc/mysql/my.cnf:ro"
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
# File: /share/np-dms/n8n/docker-compose.yml
|
# File: /share/np-dms/n8n/docker-compose.yml
|
||||||
# DMS Container v1.8.6 — Application: n8n
|
# DMS Container v1.8.6 — Application: n8n
|
||||||
# ============================================================
|
# ============================================================
|
||||||
# 🔒 SECURITY:
|
# ߔ⠓ECURITY:
|
||||||
# - secrets อยู่ใน .env (gitignored) — หลีกปัญหาการตีความหมาย `$` ใน YAML
|
# - secrets อยู่ใน .env (gitignored) — หลีกปัญหาการตีความหมาย `$` ใน YAML
|
||||||
# - n8n ไม่ได้ mount /var/run/docker.sock โดยตรง (H3)
|
# - n8n ไม่ได้ mount /var/run/docker.sock โดยตรง (H3)
|
||||||
# ใช้ docker-socket-proxy จำกัด capability — read-only Containers/Images API
|
# ใช้ docker-socket-proxy จำกัด capability — read-only Containers/Images API
|
||||||
@@ -113,9 +113,7 @@ services:
|
|||||||
|
|
||||||
n8n:
|
n8n:
|
||||||
<<: [*restart_policy, *default_logging]
|
<<: [*restart_policy, *default_logging]
|
||||||
build:
|
image: n8nio/n8n:2.16.1
|
||||||
context: ./n8n-custom
|
|
||||||
dockerfile: Dockerfile
|
|
||||||
container_name: n8n
|
container_name: n8n
|
||||||
depends_on:
|
depends_on:
|
||||||
n8n-db:
|
n8n-db:
|
||||||
@@ -166,8 +164,6 @@ services:
|
|||||||
EXECUTIONS_DATA_PRUNE: 'true'
|
EXECUTIONS_DATA_PRUNE: 'true'
|
||||||
EXECUTIONS_DATA_MAX_AGE: 168
|
EXECUTIONS_DATA_MAX_AGE: 168
|
||||||
# EXECUTIONS_DATA_PRUNE_TIMEOUT: 60
|
# EXECUTIONS_DATA_PRUNE_TIMEOUT: 60
|
||||||
# Storage Migration (fix deprecation warning)
|
|
||||||
N8N_MIGRATE_FS_STORAGE_PATH: 'true'
|
|
||||||
|
|
||||||
ports:
|
ports:
|
||||||
- '5678:5678'
|
- '5678:5678'
|
||||||
|
|||||||
@@ -1,88 +1,4 @@
|
|||||||
# File: /share/np-dms/npm/docker-compose.yml
|
04-Infrastructure-OPS/04-00-docker-compose/QNAP/npm/docker-compose.yml
|
||||||
# DMS Container v1.8.6 — Application: lcbp3-npm, Service: npm + landing
|
|
||||||
x-restart: &restart_policy
|
|
||||||
restart: unless-stopped
|
|
||||||
|
|
||||||
x-logging: &default_logging
|
|
||||||
logging:
|
|
||||||
driver: 'json-file'
|
|
||||||
options:
|
|
||||||
max-size: '10m'
|
|
||||||
max-file: '5'
|
|
||||||
name: lcbp3-npm
|
|
||||||
services:
|
|
||||||
npm:
|
|
||||||
<<: [*restart_policy, *default_logging]
|
|
||||||
image: jc21/nginx-proxy-manager:2.11.3
|
|
||||||
container_name: npm
|
|
||||||
deploy:
|
|
||||||
resources:
|
|
||||||
limits:
|
|
||||||
cpus: '1.0'
|
|
||||||
memory: 512M
|
|
||||||
reservations:
|
|
||||||
cpus: '0.25'
|
|
||||||
memory: 128M
|
|
||||||
security_opt:
|
|
||||||
- no-new-privileges:true
|
|
||||||
ports:
|
|
||||||
- '80:80' # HTTP
|
|
||||||
- '443:443' # HTTPS
|
|
||||||
- '81:81' # NPM Admin UI
|
|
||||||
env_file:
|
|
||||||
- .env
|
|
||||||
environment:
|
|
||||||
TZ: 'Asia/Bangkok'
|
|
||||||
DB_MYSQL_HOST: 'mariadb'
|
|
||||||
DB_MYSQL_PORT: 3306
|
|
||||||
DB_MYSQL_USER: 'npm'
|
|
||||||
# ⚠️ ADR-016: ห้ามใช้รหัสง่าย ๆ เช่น 'npm' — ตั้งใน .env (NPM_DB_PASSWORD)
|
|
||||||
DB_MYSQL_PASSWORD: ${NPM_DB_PASSWORD:?NPM_DB_PASSWORD required}
|
|
||||||
DB_MYSQL_NAME: 'npm'
|
|
||||||
# Uncomment this if IPv6 is not enabled on your host
|
|
||||||
DISABLE_IPV6: 'true'
|
|
||||||
networks:
|
|
||||||
- lcbp3
|
|
||||||
- giteanet
|
|
||||||
volumes:
|
|
||||||
- '/share/np-dms/npm/data:/data'
|
|
||||||
- '/share/dms-data/logs/npm:/data/logs'
|
|
||||||
- '/share/np-dms/npm/letsencrypt:/etc/letsencrypt'
|
|
||||||
- '/share/np-dms/npm/custom:/data/nginx/custom'
|
|
||||||
healthcheck:
|
|
||||||
test: ['CMD', 'curl', '-f', 'http://localhost:81/api/']
|
|
||||||
interval: 30s
|
|
||||||
timeout: 10s
|
|
||||||
retries: 3
|
|
||||||
start_period: 30s
|
|
||||||
|
|
||||||
landing:
|
|
||||||
<<: [*restart_policy, *default_logging]
|
|
||||||
image: nginx:1.27-alpine
|
|
||||||
container_name: landing
|
|
||||||
user: '0:0'
|
|
||||||
deploy:
|
|
||||||
resources:
|
|
||||||
limits:
|
|
||||||
cpus: '0.25'
|
|
||||||
memory: 128M
|
|
||||||
security_opt:
|
|
||||||
- no-new-privileges:true
|
|
||||||
volumes:
|
|
||||||
- '/share/np-dms/npm/landing:/usr/share/nginx/html:ro'
|
|
||||||
networks:
|
|
||||||
- lcbp3
|
|
||||||
healthcheck:
|
|
||||||
test: ['CMD', 'curl', '-f', 'http://localhost/']
|
|
||||||
interval: 30s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 3
|
|
||||||
networks:
|
|
||||||
lcbp3:
|
|
||||||
external: true
|
|
||||||
giteanet:
|
|
||||||
external: true
|
|
||||||
name: gitnet
|
|
||||||
|
|
||||||
# docker exec -it npm id
|
# docker exec -it npm id
|
||||||
# chown -R 0:0 /share/Container/npm
|
# chown -R 0:0 /share/Container/npm
|
||||||
|
|||||||
@@ -31,7 +31,7 @@ services:
|
|||||||
# ----------------------------------------------------------------
|
# ----------------------------------------------------------------
|
||||||
cache:
|
cache:
|
||||||
<<: [*restart_policy, *default_logging]
|
<<: [*restart_policy, *default_logging]
|
||||||
image: redis:7-alpine # ใช้ Alpine image เพื่อให้มีขน
|
image: redis:7-alpine # ใช้ Alpine image เพื่อให้มีขนาดเล็ก
|
||||||
container_name: cache
|
container_name: cache
|
||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
@@ -86,7 +86,7 @@ services:
|
|||||||
deploy:
|
deploy:
|
||||||
resources:
|
resources:
|
||||||
limits:
|
limits:
|
||||||
cpus: '2.0' # Elasticsearch ใช้ CPU และ Memory ค่อนข้างห
|
cpus: '2.0' # Elasticsearch ใช้ CPU และ Memory ค่อนข้างหนัก
|
||||||
memory: 4G
|
memory: 4G
|
||||||
reservations:
|
reservations:
|
||||||
cpus: '0.5'
|
cpus: '0.5'
|
||||||
|
|||||||
@@ -62,6 +62,48 @@ services:
|
|||||||
|
|
||||||
Otherwise, keep the inline anchor pattern (current repo-wide convention).
|
Otherwise, keep the inline anchor pattern (current repo-wide convention).
|
||||||
|
|
||||||
|
## Image Pinning Strategy
|
||||||
|
|
||||||
|
The LCBP3 platform uses a **hybrid image pinning approach**:
|
||||||
|
|
||||||
|
### Infrastructure Services (Pinned)
|
||||||
|
All infrastructure services use **explicitly pinned versions** for stability:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Examples
|
||||||
|
redis:7-alpine
|
||||||
|
elasticsearch:8.11.1
|
||||||
|
mariadb:11.8
|
||||||
|
gitea/gitea:1.22.3-rootless
|
||||||
|
n8nio/n8n:1.66.0
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rationale:**
|
||||||
|
- Infrastructure services evolve independently
|
||||||
|
- Breaking changes in Redis/Elasticsearch/MariaDB can cause data corruption
|
||||||
|
- Pinned versions ensure predictable behavior across deployments
|
||||||
|
|
||||||
|
### Application Services (Variable)
|
||||||
|
Application images use **environment variable tags** for CI/CD flexibility:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
backend:
|
||||||
|
image: lcbp3-backend:${BACKEND_IMAGE_TAG:-latest}
|
||||||
|
frontend:
|
||||||
|
image: lcbp3-frontend:${FRONTEND_IMAGE_TAG:-latest}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Rationale:**
|
||||||
|
- Application code changes frequently with each release
|
||||||
|
- CI pipelines inject SHA-specific tags per release
|
||||||
|
- `:latest` fallback enables local development
|
||||||
|
- Environment variable allows rollback to specific versions
|
||||||
|
|
||||||
|
### Version Control
|
||||||
|
- **Infrastructure versions** updated manually in compose files
|
||||||
|
- **Application versions** controlled via CI/CD pipeline environment variables
|
||||||
|
- **Release policy** documented in `04-08-release-management-policy.md`
|
||||||
|
|
||||||
## Secret Management Roadmap (S1)
|
## Secret Management Roadmap (S1)
|
||||||
|
|
||||||
Current: `env_file: .env` (gitignored) per stack.
|
Current: `env_file: .env` (gitignored) per stack.
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user