# Compare commits

**55 commits** · `eeff27a511...main`

48ed74a27b, 95ee94997f, 9c1e175b76, 78370fb590, ec35521258, d964546c8d, 2473c4c474, 3fa28bd14f, c8a0f281ef, aa96cd90e3, 8aceced902, 863a727756, dcd126d704, 32d820ea6b, 5c49bac772, be3b71007a, fea8ed6b80, c5250f3e70, 7e846bf045, 1123114f15, 381ffa44d7, 0aaa139145, 7dce419745, 9220884936, 2865bebdb1, dc8b80c5f9, 05f8f4403a, 3e6e810620, 1817158f25, 9dcdba0bb3, 5acc631994, d62beaa1bd, 9fc7f692d9, fc0580e14c, 71c091055a, 241022ada6, eff0169c21, a78c9941be, f7a43600a3, b22d00877e, 305f66e23c, 4f3aa87a93, 6abb746e08, 304f7fddf6, 0a0c6645d5, 647c35d8e4, 5ede126cb8, 582ecb5741, 553c2d13ad, 4f45a69ed0, 9360d78ea6, 23006898d9, 17d9f172d4, a3474bff6a, bf0308e350
`.agent/rules/00-project-specs.md` (new file, 62 lines)

@@ -0,0 +1,62 @@
---
trigger: always_on
---

# Project Specifications & Context Protocol

Description: Enforces strict adherence to the project's documentation structure (specs/00-06) for all agent activities.
Globs: *

---

## Agent Role

You are a Principal Engineer and Architect strictly bound by the project's documentation. You do not improvise outside of the defined specifications.

## The Context Loading Protocol

Before generating code or planning a solution, you MUST conceptually load the context in this specific order:

1. **🎯 ACTIVE TASK (`specs/06-tasks/`)**
   - Identify the current active task file.
   - *Action:* Determine the immediate scope. Do NOT implement features not listed here.

2. **📖 PROJECT CONTEXT (`specs/00-overview/`)**
   - *Action:* Align with the high-level goals and domain language described here.

3. **✅ REQUIREMENTS (`specs/01-requirements/`)**
   - *Action:* Verify that your plan satisfies the functional requirements and user stories.
   - *Constraint:* If a requirement is ambiguous, stop and ask.

4. **🏗 ARCHITECTURE & DECISIONS (`specs/02-architecture/` & `specs/05-decisions/`)**
   - *Action:* Adhere to the defined system design.
   - *Crucial:* Check `specs/05-decisions/` (ADRs) to ensure you do not violate previously agreed-upon technical decisions.

5. **💾 DATABASE & SCHEMA (`specs/07-database/`)**
   - *Action:*
     - **Read `specs/07-database/lcbp3-v1.6.0-schema.sql`** (or the relevant `.sql` files) for exact table structures and constraints.
     - **Consult `specs/07-database/data-dictionary-v1.6.0.md`** for field meanings and business rules.
     - **Check `specs/07-database/lcbp3-v1.6.0-seed.sql`** to understand initial data states.
   - *Constraint:* NEVER invent table names or columns. Use ONLY what is defined here.

6. **⚙️ IMPLEMENTATION DETAILS (`specs/03-implementation/`)**
   - *Action:* Follow the Tech Stack, Naming Conventions, and Code Patterns.

7. **🚀 OPERATIONS (`specs/04-operations/`)**
   - *Action:* Ensure deployability and configuration compliance.

## Execution Rules

### 1. Citation Requirement

When proposing a change or writing code, you must explicitly reference the source of truth:

> "Implementing feature X per `specs/01-requirements/README.md` and `specs/01-requirements/**.md` using the pattern defined in `specs/03-implementation/**.md`."

### 2. Conflict Resolution

- **Spec vs. Training Data:** The `specs/` folder ALWAYS supersedes your general training data.
- **Spec vs. User Prompt:** If a user prompt contradicts `specs/05-decisions/`, warn the user before proceeding.

### 3. File Generation

- Do not create new files outside of the defined structure.
- Keep the code style consistent with `specs/03-implementation/`.

### 4. Data Migration

- Do not write migrations; the schema can be modified directly.

---
`.agent/rules/01-code-execution.md` (new file, 20 lines)

@@ -0,0 +1,20 @@
---
trigger: always_on
---

---

description: Control which shell commands the agent may run automatically.
allowAuto: ["pnpm test:watch", "pnpm test:debug", "pnpm test:e2e", "git status"]
denyAuto: ["rm -rf", "Remove-Item", "git push --force", "curl | bash"]
alwaysReview: true
scopes: ["backend/src/**", "backend/test/**", "frontend/app/**"]

---

# Execution Rules

- Only auto-execute commands that are explicitly listed in `allowAuto`.
- Commands in `denyAuto` must always be blocked, even if manually requested.
- All shell operations that create, modify, or delete files in `backend/src/`, `backend/test/`, or `frontend/app/` require human review.
- Alert if environment variables related to DB connections or secrets would be displayed or logged.
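The rule file above only declares policy; as a rough sketch of how a harness might enforce it (the gate function, its name, and the substring matching are assumptions, not part of the rules file):

```ts
// Hypothetical gate for the allowAuto/denyAuto/scopes policy declared above.
type Verdict = "auto" | "review" | "blocked";

const allowAuto = ["pnpm test:watch", "pnpm test:debug", "pnpm test:e2e", "git status"];
const denyAuto = ["rm -rf", "Remove-Item", "git push --force", "curl | bash"];
const reviewScopes = [/^backend\/src\//, /^backend\/test\//, /^frontend\/app\//];

function gateCommand(command: string, touchedPaths: string[]): Verdict {
  // Deny list wins over everything, even an explicit user request.
  if (denyAuto.some((pattern) => command.includes(pattern))) return "blocked";
  // Writes into a guarded scope always require human review.
  if (touchedPaths.some((p) => reviewScopes.some((scope) => scope.test(p)))) return "review";
  // Only exact allow-listed commands run unattended; everything else is reviewed.
  return allowAuto.includes(command) ? "auto" : "review";
}

// gateCommand("git push --force", []) === "blocked"
// gateCommand("git status", [])      === "auto"
```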
`.aignore` (new file, 9 lines)

@@ -0,0 +1,9 @@
node_modules/
backend/node_modules/
frontend/node_modules/
backend/dist/
frontend/dist/
backend/build/
frontend/build/
.git/
*.log
`.gemini/GEMINI.md` (new file, 41 lines)

@@ -0,0 +1,41 @@
---
trigger: always_on
---

# NAP-DMS Project Context & Rules

## 🧠 Role & Persona

Act as a **Senior Full Stack Developer** expert in **NestJS**, **Next.js**, and **TypeScript**.
You value **Data Integrity**, **Security**, and **Clean Architecture**.

## 🏗️ Project Overview

This is **LCBP3-DMS (Laem Chabang Port Phase 3 - Document Management System)**.

- **Goal:** Manage construction documents (Correspondence, RFA, Drawings) with complex approval workflows.
- **Infrastructure:** Deployed on a QNAP server via Docker Container Station.

## 💻 Tech Stack & Constraints

- **Backend:** NestJS (Modular Architecture), TypeORM, MariaDB 10.11, Redis 7.2 (BullMQ), Elasticsearch 8.11, JWT (JSON Web Tokens), CASL (4-Level RBAC).
- **Frontend:** Next.js 14+ (App Router), Tailwind CSS, Shadcn/UI, React Context / Zustand, React Hook Form + Zod, Axios.
- **Language:** TypeScript (Strict Mode). **NO `any` types allowed.**

## 🛡️ Security & Integrity Rules

1. **Idempotency:** All critical POST/PUT requests MUST check for an `Idempotency-Key` header.
2. **File Upload:** Implement **Two-Phase Storage** (upload to temp -> commit to permanent).
3. **Race Conditions:** Use a **Redis Lock** + **Optimistic Locking** for document number generation.
4. **Validation:** Use Zod or class-validator for all inputs.

## Workflow Guidelines

- When implementing, strictly follow the documents in `specs/`.
- Always verify the database schema against `specs/07-database/` before writing queries.

## 🚫 Forbidden Actions

- DO NOT use SQL triggers (business logic must live in NestJS services).
- DO NOT use `.env` files for production configuration (use Docker environment variables).
- DO NOT generate code that violates OWASP Top 10 security practices.
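GEMINI.md mandates the Redis lock + optimistic locking pattern for document numbering without showing code. A minimal sketch of one way to satisfy it, assuming an `ioredis` client, a TypeORM `DocNumberCounter` entity, and a `lock:docnum:*` key scheme (all three are invented for illustration, not defined by the specs):

```ts
import { randomUUID } from "crypto";
import Redis from "ioredis";
import { Column, DataSource, Entity, PrimaryColumn, VersionColumn } from "typeorm";

@Entity()
class DocNumberCounter {
  @PrimaryColumn() series!: string;
  @Column() lastValue!: number;
  @VersionColumn() version!: number; // optimistic-lock guard
}

const redis = new Redis();

async function generateDocumentNumber(ds: DataSource, series: string): Promise<string> {
  const lockKey = `lock:docnum:${series}`;
  const token = randomUUID();
  // Redis lock: SET NX PX gives cross-instance mutual exclusion with a 5 s TTL.
  const acquired = await redis.set(lockKey, token, "PX", 5000, "NX");
  if (!acquired) throw new Error("Document numbering is busy; retry");
  try {
    return await ds.transaction(async (em) => {
      // Optimistic locking: if a concurrent writer slipped past the lock,
      // the @VersionColumn check makes this save fail instead of double-issuing.
      const counter = await em.findOneByOrFail(DocNumberCounter, { series });
      counter.lastValue += 1;
      await em.save(counter);
      return `${series}-${String(counter.lastValue).padStart(5, "0")}`;
    });
  } finally {
    // Best-effort release; production code would use an atomic Lua check-and-delete.
    if ((await redis.get(lockKey)) === token) await redis.del(lockKey);
  }
}
```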
`.gemini/commands/speckit.analyze.toml` (new file, 188 lines)

@@ -0,0 +1,188 @@
description = "Perform a non-destructive cross-artifact consistency and quality analysis across spec.md, plan.md, and tasks.md after task generation."

prompt = """
---
description: Perform a non-destructive cross-artifact consistency and quality analysis across spec.md, plan.md, and tasks.md after task generation.
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Goal

Identify inconsistencies, duplications, ambiguities, and underspecified items across the three core artifacts (`spec.md`, `plan.md`, `tasks.md`) before implementation. This command MUST run only after `/speckit.tasks` has successfully produced a complete `tasks.md`.

## Operating Constraints

**STRICTLY READ-ONLY**: Do **not** modify any files. Output a structured analysis report. Offer an optional remediation plan (the user must explicitly approve before any follow-up editing commands would be invoked manually).

**Constitution Authority**: The project constitution (`.specify/memory/constitution.md`) is **non-negotiable** within this analysis scope. Constitution conflicts are automatically CRITICAL and require adjustment of the spec, plan, or tasks—not dilution, reinterpretation, or silent ignoring of the principle. If a principle itself needs to change, that must occur in a separate, explicit constitution update outside `/speckit.analyze`.

## Execution Steps

### 1. Initialize Analysis Context

Run `.specify/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` once from repo root and parse the JSON for FEATURE_DIR and AVAILABLE_DOCS. Derive absolute paths:

- SPEC = FEATURE_DIR/spec.md
- PLAN = FEATURE_DIR/plan.md
- TASKS = FEATURE_DIR/tasks.md

Abort with an error message if any required file is missing (instruct the user to run the missing prerequisite command).
For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").

### 2. Load Artifacts (Progressive Disclosure)

Load only the minimal necessary context from each artifact:

**From spec.md:**

- Overview/Context
- Functional Requirements
- Non-Functional Requirements
- User Stories
- Edge Cases (if present)

**From plan.md:**

- Architecture/stack choices
- Data Model references
- Phases
- Technical constraints

**From tasks.md:**

- Task IDs
- Descriptions
- Phase grouping
- Parallel markers [P]
- Referenced file paths

**From constitution:**

- Load `.specify/memory/constitution.md` for principle validation

### 3. Build Semantic Models

Create internal representations (do not include raw artifacts in output):

- **Requirements inventory**: Each functional + non-functional requirement with a stable key (derive a slug from the imperative phrase; e.g., "User can upload file" → `user-can-upload-file`)
- **User story/action inventory**: Discrete user actions with acceptance criteria
- **Task coverage mapping**: Map each task to one or more requirements or stories (inference by keyword / explicit reference patterns like IDs or key phrases)
- **Constitution rule set**: Extract principle names and MUST/SHOULD normative statements
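The requirement-key derivation above is only described in prose; a tiny sketch of one plausible slugging rule (an illustration, not part of the command prompt; the function is invented):

```ts
// Hypothetical slug derivation for requirement keys,
// e.g. "User can upload file" -> "user-can-upload-file".
function requirementKey(phrase: string): string {
  return phrase
    .toLowerCase()
    .replace(/[^a-z0-9\s-]/g, "") // drop punctuation
    .trim()
    .replace(/\s+/g, "-"); // collapse whitespace into hyphens
}
```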
### 4. Detection Passes (Token-Efficient Analysis)

Focus on high-signal findings. Limit to 50 findings total; aggregate the remainder in an overflow summary.

#### A. Duplication Detection

- Identify near-duplicate requirements
- Mark lower-quality phrasing for consolidation

#### B. Ambiguity Detection

- Flag vague adjectives (fast, scalable, secure, intuitive, robust) lacking measurable criteria
- Flag unresolved placeholders (TODO, TKTK, ???, `<placeholder>`, etc.)

#### C. Underspecification

- Requirements with verbs but missing an object or measurable outcome
- User stories missing acceptance criteria alignment
- Tasks referencing files or components not defined in the spec/plan

#### D. Constitution Alignment

- Any requirement or plan element conflicting with a MUST principle
- Missing mandated sections or quality gates from the constitution

#### E. Coverage Gaps

- Requirements with zero associated tasks
- Tasks with no mapped requirement/story
- Non-functional requirements not reflected in tasks (e.g., performance, security)

#### F. Inconsistency

- Terminology drift (same concept named differently across files)
- Data entities referenced in the plan but absent in the spec (or vice versa)
- Task ordering contradictions (e.g., integration tasks before foundational setup tasks without a dependency note)
- Conflicting requirements (e.g., one requires Next.js while the other specifies Vue)

### 5. Severity Assignment

Use this heuristic to prioritize findings:

- **CRITICAL**: Violates a constitution MUST, missing core spec artifact, or a requirement with zero coverage that blocks baseline functionality
- **HIGH**: Duplicate or conflicting requirement, ambiguous security/performance attribute, untestable acceptance criterion
- **MEDIUM**: Terminology drift, missing non-functional task coverage, underspecified edge case
- **LOW**: Style/wording improvements, minor redundancy not affecting execution order

### 6. Produce Compact Analysis Report

Output a Markdown report (no file writes) with the following structure:

## Specification Analysis Report

| ID | Category | Severity | Location(s) | Summary | Recommendation |
|----|----------|----------|-------------|---------|----------------|
| A1 | Duplication | HIGH | spec.md:L120-134 | Two similar requirements ... | Merge phrasing; keep clearer version |

(Add one row per finding; generate stable IDs prefixed by the category initial.)

**Coverage Summary Table:**

| Requirement Key | Has Task? | Task IDs | Notes |
|-----------------|-----------|----------|-------|

**Constitution Alignment Issues:** (if any)

**Unmapped Tasks:** (if any)

**Metrics:**

- Total Requirements
- Total Tasks
- Coverage % (requirements with >=1 task)
- Ambiguity Count
- Duplication Count
- Critical Issues Count

### 7. Provide Next Actions

At the end of the report, output a concise Next Actions block:

- If CRITICAL issues exist: Recommend resolving them before `/speckit.implement`
- If only LOW/MEDIUM: The user may proceed, but provide improvement suggestions
- Provide explicit command suggestions: e.g., "Run /speckit.specify with refinement", "Run /speckit.plan to adjust architecture", "Manually edit tasks.md to add coverage for 'performance-metrics'"

### 8. Offer Remediation

Ask the user: "Would you like me to suggest concrete remediation edits for the top N issues?" (Do NOT apply them automatically.)

## Operating Principles

### Context Efficiency

- **Minimal high-signal tokens**: Focus on actionable findings, not exhaustive documentation
- **Progressive disclosure**: Load artifacts incrementally; don't dump all content into the analysis
- **Token-efficient output**: Limit the findings table to 50 rows; summarize overflow
- **Deterministic results**: Rerunning without changes should produce consistent IDs and counts

### Analysis Guidelines

- **NEVER modify files** (this is read-only analysis)
- **NEVER hallucinate missing sections** (if absent, report them accurately)
- **Prioritize constitution violations** (these are always CRITICAL)
- **Use examples over exhaustive rules** (cite specific instances, not generic patterns)
- **Report zero issues gracefully** (emit a success report with coverage statistics)

## Context

{{args}}
"""
`.gemini/commands/speckit.checklist.toml` (new file, 298 lines)

@@ -0,0 +1,298 @@
description = "Generate a custom checklist for the current feature based on user requirements."

prompt = """
---
description: Generate a custom checklist for the current feature based on user requirements.
---

## Checklist Purpose: "Unit Tests for English"

**CRITICAL CONCEPT**: Checklists are **UNIT TESTS FOR REQUIREMENTS WRITING** - they validate the quality, clarity, and completeness of requirements in a given domain.

**NOT for verification/testing**:

- ❌ NOT "Verify the button clicks correctly"
- ❌ NOT "Test error handling works"
- ❌ NOT "Confirm the API returns 200"
- ❌ NOT checking if code/implementation matches the spec

**FOR requirements quality validation**:

- ✅ "Are visual hierarchy requirements defined for all card types?" (completeness)
- ✅ "Is 'prominent display' quantified with specific sizing/positioning?" (clarity)
- ✅ "Are hover state requirements consistent across all interactive elements?" (consistency)
- ✅ "Are accessibility requirements defined for keyboard navigation?" (coverage)
- ✅ "Does the spec define what happens when the logo image fails to load?" (edge cases)

**Metaphor**: If your spec is code written in English, the checklist is its unit test suite. You're testing whether the requirements are well-written, complete, unambiguous, and ready for implementation - NOT whether the implementation works.

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Execution Steps

1. **Setup**: Run `.specify/scripts/powershell/check-prerequisites.ps1 -Json` from repo root and parse the JSON for FEATURE_DIR and the AVAILABLE_DOCS list.
   - All file paths must be absolute.
   - For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").

2. **Clarify intent (dynamic)**: Derive up to THREE initial contextual clarifying questions (no pre-baked catalog). They MUST:
   - Be generated from the user's phrasing + extracted signals from spec/plan/tasks
   - Only ask about information that materially changes checklist content
   - Be skipped individually if already unambiguous in `$ARGUMENTS`
   - Prefer precision over breadth

   Generation algorithm:
   1. Extract signals: feature domain keywords (e.g., auth, latency, UX, API), risk indicators ("critical", "must", "compliance"), stakeholder hints ("QA", "review", "security team"), and explicit deliverables ("a11y", "rollback", "contracts").
   2. Cluster signals into candidate focus areas (max 4) ranked by relevance.
   3. Identify probable audience & timing (author, reviewer, QA, release) if not explicit.
   4. Detect missing dimensions: scope breadth, depth/rigor, risk emphasis, exclusion boundaries, measurable acceptance criteria.
   5. Formulate questions chosen from these archetypes:
      - Scope refinement (e.g., "Should this include integration touchpoints with X and Y or stay limited to local module correctness?")
      - Risk prioritization (e.g., "Which of these potential risk areas should receive mandatory gating checks?")
      - Depth calibration (e.g., "Is this a lightweight pre-commit sanity list or a formal release gate?")
      - Audience framing (e.g., "Will this be used by the author only or by peers during PR review?")
      - Boundary exclusion (e.g., "Should we explicitly exclude performance tuning items this round?")
      - Scenario class gap (e.g., "No recovery flows detected—are rollback / partial failure paths in scope?")

   Question formatting rules:
   - If presenting options, generate a compact table with columns: Option | Candidate | Why It Matters
   - Limit to A–E options maximum; omit the table if a free-form answer is clearer
   - Never ask the user to restate what they already said
   - Avoid speculative categories (no hallucination). If uncertain, ask explicitly: "Confirm whether X belongs in scope."

   Defaults when interaction is impossible:
   - Depth: Standard
   - Audience: Reviewer (PR) if code-related; Author otherwise
   - Focus: Top 2 relevance clusters

   Output the questions (label Q1/Q2/Q3). After answers: if ≥2 scenario classes (Alternate / Exception / Recovery / Non-Functional domain) remain unclear, you MAY ask up to TWO more targeted follow-ups (Q4/Q5) with a one-line justification each (e.g., "Unresolved recovery path risk"). Do not exceed five total questions. Skip escalation if the user explicitly declines more.

3. **Understand user request**: Combine `$ARGUMENTS` + clarifying answers:
   - Derive the checklist theme (e.g., security, review, deploy, ux)
   - Consolidate explicit must-have items mentioned by the user
   - Map focus selections to category scaffolding
   - Infer any missing context from spec/plan/tasks (do NOT hallucinate)

4. **Load feature context**: Read from FEATURE_DIR:
   - spec.md: Feature requirements and scope
   - plan.md (if it exists): Technical details, dependencies
   - tasks.md (if it exists): Implementation tasks

   **Context Loading Strategy**:
   - Load only the portions relevant to active focus areas (avoid full-file dumping)
   - Prefer summarizing long sections into concise scenario/requirement bullets
   - Use progressive disclosure: add follow-on retrieval only if gaps are detected
   - If source docs are large, generate interim summary items instead of embedding raw text

5. **Generate checklist** - Create "Unit Tests for Requirements":
   - Create the `FEATURE_DIR/checklists/` directory if it doesn't exist
   - Generate a unique checklist filename:
     - Use a short, descriptive name based on the domain (e.g., `ux.md`, `api.md`, `security.md`)
     - Format: `[domain].md`
     - If the file exists, append to the existing file
   - Number items sequentially starting from CHK001
   - Each `/speckit.checklist` run creates a NEW file (never overwrites existing checklists)

   **CORE PRINCIPLE - Test the Requirements, Not the Implementation**:
   Every checklist item MUST evaluate the REQUIREMENTS THEMSELVES for:
   - **Completeness**: Are all necessary requirements present?
   - **Clarity**: Are requirements unambiguous and specific?
   - **Consistency**: Do requirements align with each other?
   - **Measurability**: Can requirements be objectively verified?
   - **Coverage**: Are all scenarios/edge cases addressed?

   **Category Structure** - Group items by requirement quality dimensions:
   - **Requirement Completeness** (Are all necessary requirements documented?)
   - **Requirement Clarity** (Are requirements specific and unambiguous?)
   - **Requirement Consistency** (Do requirements align without conflicts?)
   - **Acceptance Criteria Quality** (Are success criteria measurable?)
   - **Scenario Coverage** (Are all flows/cases addressed?)
   - **Edge Case Coverage** (Are boundary conditions defined?)
   - **Non-Functional Requirements** (Performance, Security, Accessibility, etc. - are they specified?)
   - **Dependencies & Assumptions** (Are they documented and validated?)
   - **Ambiguities & Conflicts** (What needs clarification?)

   **HOW TO WRITE CHECKLIST ITEMS - "Unit Tests for English"**:

   ❌ **WRONG** (testing implementation):
   - "Verify landing page displays 3 episode cards"
   - "Test hover states work on desktop"
   - "Confirm logo click navigates home"

   ✅ **CORRECT** (testing requirements quality):
   - "Are the exact number and layout of featured episodes specified?" [Completeness]
   - "Is 'prominent display' quantified with specific sizing/positioning?" [Clarity]
   - "Are hover state requirements consistent across all interactive elements?" [Consistency]
   - "Are keyboard navigation requirements defined for all interactive UI?" [Coverage]
   - "Is the fallback behavior specified when the logo image fails to load?" [Edge Cases]
   - "Are loading states defined for asynchronous episode data?" [Completeness]
   - "Does the spec define visual hierarchy for competing UI elements?" [Clarity]

   **ITEM STRUCTURE**:
   Each item should follow this pattern:
   - Question format asking about requirement quality
   - Focus on what's WRITTEN (or not written) in the spec/plan
   - Include the quality dimension in brackets [Completeness/Clarity/Consistency/etc.]
   - Reference the spec section `[Spec §X.Y]` when checking existing requirements
   - Use the `[Gap]` marker when checking for missing requirements

   **EXAMPLES BY QUALITY DIMENSION**:

   Completeness:
   - "Are error handling requirements defined for all API failure modes? [Gap]"
   - "Are accessibility requirements specified for all interactive elements? [Completeness]"
   - "Are mobile breakpoint requirements defined for responsive layouts? [Gap]"

   Clarity:
   - "Is 'fast loading' quantified with specific timing thresholds? [Clarity, Spec §NFR-2]"
   - "Are 'related episodes' selection criteria explicitly defined? [Clarity, Spec §FR-5]"
   - "Is 'prominent' defined with measurable visual properties? [Ambiguity, Spec §FR-4]"

   Consistency:
   - "Do navigation requirements align across all pages? [Consistency, Spec §FR-10]"
   - "Are card component requirements consistent between landing and detail pages? [Consistency]"

   Coverage:
   - "Are requirements defined for zero-state scenarios (no episodes)? [Coverage, Edge Case]"
   - "Are concurrent user interaction scenarios addressed? [Coverage, Gap]"
   - "Are requirements specified for partial data loading failures? [Coverage, Exception Flow]"

   Measurability:
   - "Are visual hierarchy requirements measurable/testable? [Acceptance Criteria, Spec §FR-1]"
   - "Can 'balanced visual weight' be objectively verified? [Measurability, Spec §FR-2]"

   **Scenario Classification & Coverage** (requirements quality focus):
   - Check if requirements exist for: Primary, Alternate, Exception/Error, Recovery, and Non-Functional scenarios
   - For each scenario class, ask: "Are [scenario type] requirements complete, clear, and consistent?"
   - If a scenario class is missing: "Are [scenario type] requirements intentionally excluded or missing? [Gap]"
   - Include resilience/rollback when state mutation occurs: "Are rollback requirements defined for migration failures? [Gap]"

   **Traceability Requirements**:
   - MINIMUM: ≥80% of items MUST include at least one traceability reference
   - Each item should reference a spec section `[Spec §X.Y]`, or use the markers `[Gap]`, `[Ambiguity]`, `[Conflict]`, `[Assumption]`
   - If no ID system exists: "Is a requirement & acceptance criteria ID scheme established? [Traceability]"

   **Surface & Resolve Issues** (requirements quality problems):
   Ask questions about the requirements themselves:
   - Ambiguities: "Is the term 'fast' quantified with specific metrics? [Ambiguity, Spec §NFR-1]"
   - Conflicts: "Do navigation requirements conflict between §FR-10 and §FR-10a? [Conflict]"
   - Assumptions: "Is the assumption of an 'always available podcast API' validated? [Assumption]"
   - Dependencies: "Are external podcast API requirements documented? [Dependency, Gap]"
   - Missing definitions: "Is 'visual hierarchy' defined with measurable criteria? [Gap]"

   **Content Consolidation**:
   - Soft cap: if raw candidate items > 40, prioritize by risk/impact
   - Merge near-duplicates checking the same requirement aspect
   - If >5 low-impact edge cases, create one item: "Are edge cases X, Y, Z addressed in requirements? [Coverage]"

   **🚫 ABSOLUTELY PROHIBITED** - These make it an implementation test, not a requirements test:
   - ❌ Any item starting with "Verify", "Test", "Confirm", "Check" + implementation behavior
   - ❌ References to code execution, user actions, system behavior
   - ❌ "Displays correctly", "works properly", "functions as expected"
   - ❌ "Click", "navigate", "render", "load", "execute"
   - ❌ Test cases, test plans, QA procedures
   - ❌ Implementation details (frameworks, APIs, algorithms)

   **✅ REQUIRED PATTERNS** - These test requirements quality:
   - ✅ "Are [requirement type] defined/specified/documented for [scenario]?"
   - ✅ "Is [vague term] quantified/clarified with specific criteria?"
   - ✅ "Are requirements consistent between [section A] and [section B]?"
   - ✅ "Can [requirement] be objectively measured/verified?"
   - ✅ "Are [edge cases/scenarios] addressed in requirements?"
   - ✅ "Does the spec define [missing aspect]?"

6. **Structure Reference**: Generate the checklist following the canonical template in `.specify/templates/checklist-template.md` for title, meta section, category headings, and ID formatting. If the template is unavailable, use: an H1 title, purpose/created meta lines, and `##` category sections containing `- [ ] CHK### <requirement item>` lines with globally incrementing IDs starting at CHK001.

7. **Report**: Output the full path to the created checklist, the item count, and a reminder that each run creates a new file. Summarize:
   - Focus areas selected
   - Depth level
   - Actor/timing
   - Any explicit user-specified must-have items incorporated

**Important**: Each `/speckit.checklist` command invocation creates a checklist file using a short, descriptive name unless the file already exists. This allows:

- Multiple checklists of different types (e.g., `ux.md`, `test.md`, `security.md`)
- Simple, memorable filenames that indicate checklist purpose
- Easy identification and navigation in the `checklists/` folder

To avoid clutter, use descriptive types and clean up obsolete checklists when done.
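The naming and ID rules in step 5 above could be expressed roughly as follows (an editorial sketch, not part of the command prompt; the helper name and regex are invented):

```ts
// Hypothetical helper for step 5: resolve the checklist path and next CHK id.
import * as fs from "fs";
import * as path from "path";

function nextChecklistId(featureDir: string, domain: string): { file: string; nextId: string } {
  const dir = path.join(featureDir, "checklists");
  fs.mkdirSync(dir, { recursive: true });
  const file = path.join(dir, `${domain}.md`); // e.g. ux.md, api.md, security.md
  const existing = fs.existsSync(file) ? fs.readFileSync(file, "utf8") : "";
  // IDs increment globally within the file: CHK001, CHK002, ...
  const used = [...existing.matchAll(/CHK(\d{3})/g)].map((m) => Number(m[1]));
  const next = (used.length ? Math.max(...used) : 0) + 1;
  return { file, nextId: `CHK${String(next).padStart(3, "0")}` };
}
```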
## Example Checklist Types & Sample Items

**UX Requirements Quality:** `ux.md`

Sample items (testing the requirements, NOT the implementation):

- "Are visual hierarchy requirements defined with measurable criteria? [Clarity, Spec §FR-1]"
- "Is the number and positioning of UI elements explicitly specified? [Completeness, Spec §FR-1]"
- "Are interaction state requirements (hover, focus, active) consistently defined? [Consistency]"
- "Are accessibility requirements specified for all interactive elements? [Coverage, Gap]"
- "Is fallback behavior defined when images fail to load? [Edge Case, Gap]"
- "Can 'prominent display' be objectively measured? [Measurability, Spec §FR-4]"

**API Requirements Quality:** `api.md`

Sample items:

- "Are error response formats specified for all failure scenarios? [Completeness]"
- "Are rate limiting requirements quantified with specific thresholds? [Clarity]"
- "Are authentication requirements consistent across all endpoints? [Consistency]"
- "Are retry/timeout requirements defined for external dependencies? [Coverage, Gap]"
- "Is the versioning strategy documented in requirements? [Gap]"

**Performance Requirements Quality:** `performance.md`

Sample items:

- "Are performance requirements quantified with specific metrics? [Clarity]"
- "Are performance targets defined for all critical user journeys? [Coverage]"
- "Are performance requirements under different load conditions specified? [Completeness]"
- "Can performance requirements be objectively measured? [Measurability]"
- "Are degradation requirements defined for high-load scenarios? [Edge Case, Gap]"

**Security Requirements Quality:** `security.md`

Sample items:

- "Are authentication requirements specified for all protected resources? [Coverage]"
- "Are data protection requirements defined for sensitive information? [Completeness]"
- "Is the threat model documented and requirements aligned to it? [Traceability]"
- "Are security requirements consistent with compliance obligations? [Consistency]"
- "Are security failure/breach response requirements defined? [Gap, Exception Flow]"

## Anti-Examples: What NOT To Do

**❌ WRONG - These test implementation, not requirements:**

```markdown
- [ ] CHK001 - Verify landing page displays 3 episode cards [Spec §FR-001]
- [ ] CHK002 - Test hover states work correctly on desktop [Spec §FR-003]
- [ ] CHK003 - Confirm logo click navigates to home page [Spec §FR-010]
- [ ] CHK004 - Check that related episodes section shows 3-5 items [Spec §FR-005]
```

**✅ CORRECT - These test requirements quality:**

```markdown
- [ ] CHK001 - Are the number and layout of featured episodes explicitly specified? [Completeness, Spec §FR-001]
- [ ] CHK002 - Are hover state requirements consistently defined for all interactive elements? [Consistency, Spec §FR-003]
- [ ] CHK003 - Are navigation requirements clear for all clickable brand elements? [Clarity, Spec §FR-010]
- [ ] CHK004 - Are the selection criteria for related episodes documented? [Gap, Spec §FR-005]
- [ ] CHK005 - Are loading state requirements defined for asynchronous episode data? [Gap]
- [ ] CHK006 - Can "visual hierarchy" requirements be objectively measured? [Measurability, Spec §FR-001]
```

**Key Differences:**

- Wrong: Tests whether the system works correctly
- Correct: Tests whether the requirements are written correctly
- Wrong: Verification of behavior
- Correct: Validation of requirement quality
- Wrong: "Does it do X?"
- Correct: "Is X clearly specified?"
"""
`.gemini/commands/speckit.clarify.toml` (new file, 185 lines)

@@ -0,0 +1,185 @@
description = "Identify underspecified areas in the current feature spec by asking up to 5 highly targeted clarification questions and encoding answers back into the spec."

prompt = """
---
description: Identify underspecified areas in the current feature spec by asking up to 5 highly targeted clarification questions and encoding answers back into the spec.
handoffs:
  - label: Build Technical Plan
    agent: speckit.plan
    prompt: Create a plan for the spec. I am building with...
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

Goal: Detect and reduce ambiguity or missing decision points in the active feature specification and record the clarifications directly in the spec file.

Note: This clarification workflow is expected to run (and be completed) BEFORE invoking `/speckit.plan`. If the user explicitly states they are skipping clarification (e.g., an exploratory spike), you may proceed, but must warn that downstream rework risk increases.

Execution steps:

1. Run `.specify/scripts/powershell/check-prerequisites.ps1 -Json -PathsOnly` from repo root **once** (combined `--json --paths-only` mode / `-Json -PathsOnly`). Parse the minimal JSON payload fields:
   - `FEATURE_DIR`
   - `FEATURE_SPEC`
   - (Optionally capture `IMPL_PLAN` and `TASKS` for future chained flows.)
   - If JSON parsing fails, abort and instruct the user to re-run `/speckit.specify` or verify the feature branch environment.
   - For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").

2. Load the current spec file. Perform a structured ambiguity & coverage scan using this taxonomy. For each category, mark status: Clear / Partial / Missing. Produce an internal coverage map used for prioritization (do not output the raw map unless no questions will be asked).

   Functional Scope & Behavior:
   - Core user goals & success criteria
   - Explicit out-of-scope declarations
   - User roles / personas differentiation

   Domain & Data Model:
   - Entities, attributes, relationships
   - Identity & uniqueness rules
   - Lifecycle/state transitions
   - Data volume / scale assumptions

   Interaction & UX Flow:
   - Critical user journeys / sequences
   - Error/empty/loading states
   - Accessibility or localization notes

   Non-Functional Quality Attributes:
   - Performance (latency, throughput targets)
   - Scalability (horizontal/vertical, limits)
   - Reliability & availability (uptime, recovery expectations)
   - Observability (logging, metrics, tracing signals)
   - Security & privacy (authN/Z, data protection, threat assumptions)
   - Compliance / regulatory constraints (if any)

   Integration & External Dependencies:
   - External services/APIs and failure modes
   - Data import/export formats
   - Protocol/versioning assumptions

   Edge Cases & Failure Handling:
   - Negative scenarios
   - Rate limiting / throttling
   - Conflict resolution (e.g., concurrent edits)

   Constraints & Tradeoffs:
   - Technical constraints (language, storage, hosting)
   - Explicit tradeoffs or rejected alternatives

   Terminology & Consistency:
   - Canonical glossary terms
   - Avoided synonyms / deprecated terms

   Completion Signals:
   - Acceptance criteria testability
   - Measurable Definition-of-Done style indicators

   Misc / Placeholders:
   - TODO markers / unresolved decisions
   - Ambiguous adjectives ("robust", "intuitive") lacking quantification

   For each category with Partial or Missing status, add a candidate question opportunity unless:
   - Clarification would not materially change implementation or validation strategy
   - The information is better deferred to the planning phase (note internally)

3. Generate (internally) a prioritized queue of candidate clarification questions (maximum 5). Do NOT output them all at once. Apply these constraints:
   - Maximum of 10 total questions across the whole session.
   - Each question must be answerable with EITHER:
     - A short multiple-choice selection (2–5 distinct, mutually exclusive options), OR
     - A one-word / short-phrase answer (explicitly constrain: "Answer in <=5 words").
   - Only include questions whose answers materially impact architecture, data modeling, task decomposition, test design, UX behavior, operational readiness, or compliance validation.
   - Ensure category coverage balance: attempt to cover the highest-impact unresolved categories first; avoid asking two low-impact questions when a single high-impact area (e.g., security posture) is unresolved.
   - Exclude questions already answered, trivial stylistic preferences, or plan-level execution details (unless blocking correctness).
   - Favor clarifications that reduce downstream rework risk or prevent misaligned acceptance tests.
   - If more than 5 categories remain unresolved, select the top 5 by an (Impact * Uncertainty) heuristic.
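One plausible reading of the (Impact * Uncertainty) heuristic above, sketched in code (an illustration, not part of the command prompt; the 1-3 scale and scoring are assumptions):

```ts
// Hypothetical (Impact * Uncertainty) prioritization for the question queue.
type Candidate = { question: string; impact: 1 | 2 | 3; uncertainty: 1 | 2 | 3 };

function topQuestions(candidates: Candidate[], limit = 5): Candidate[] {
  return [...candidates]
    .sort((a, b) => b.impact * b.uncertainty - a.impact * a.uncertainty)
    .slice(0, limit);
}
```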
4. Sequential questioning loop (interactive):
   - Present EXACTLY ONE question at a time.
   - For multiple-choice questions:
     - **Analyze all options** and determine the **most suitable option** based on:
       - Best practices for the project type
       - Common patterns in similar implementations
       - Risk reduction (security, performance, maintainability)
       - Alignment with any explicit project goals or constraints visible in the spec
     - Present your **recommended option prominently** at the top with clear reasoning (1-2 sentences explaining why this is the best choice).
     - Format as: `**Recommended:** Option [X] - <reasoning>`
     - Then render all options as a Markdown table:

       | Option | Description |
       |--------|-------------|
       | A | <Option A description> |
       | B | <Option B description> |
       | C | <Option C description> (add D/E as needed up to 5) |
       | Short | Provide a different short answer (<=5 words) (include only if a free-form alternative is appropriate) |

     - After the table, add: `You can reply with the option letter (e.g., "A"), accept the recommendation by saying "yes" or "recommended", or provide your own short answer.`
   - For short-answer style (no meaningful discrete options):
     - Provide your **suggested answer** based on best practices and context.
     - Format as: `**Suggested:** <your proposed answer> - <brief reasoning>`
     - Then output: `Format: Short answer (<=5 words). You can accept the suggestion by saying "yes" or "suggested", or provide your own answer.`
   - After the user answers:
     - If the user replies with "yes", "recommended", or "suggested", use your previously stated recommendation/suggestion as the answer.
     - Otherwise, validate that the answer maps to one option or fits the <=5 word constraint.
     - If ambiguous, ask for a quick disambiguation (the retry still belongs to the same question; do not advance).
     - Once satisfactory, record it in working memory (do not yet write to disk) and move to the next queued question.
   - Stop asking further questions when:
     - All critical ambiguities are resolved early (remaining queued items become unnecessary), OR
     - The user signals completion ("done", "good", "no more"), OR
     - You reach 5 asked questions.
   - Never reveal future queued questions in advance.
   - If no valid questions exist at the start, immediately report that there are no critical ambiguities.

5. Integration after EACH accepted answer (incremental update approach):
   - Maintain an in-memory representation of the spec (loaded once at start) plus the raw file contents.
   - For the first integrated answer in this session:
     - Ensure a `## Clarifications` section exists (create it just after the highest-level contextual/overview section per the spec template if missing).
     - Under it, create (if not present) a `### Session YYYY-MM-DD` subheading for today.
   - Append a bullet line immediately after acceptance: `- Q: <question> → A: <final answer>`.
   - Then immediately apply the clarification to the most appropriate section(s):
     - Functional ambiguity → Update or add a bullet in Functional Requirements.
     - User interaction / actor distinction → Update User Stories or the Actors subsection (if present) with the clarified role, constraint, or scenario.
     - Data shape / entities → Update the Data Model (add fields, types, relationships) preserving ordering; note added constraints succinctly.
     - Non-functional constraint → Add/modify measurable criteria in the Non-Functional / Quality Attributes section (convert a vague adjective to a metric or explicit target).
     - Edge case / negative flow → Add a new bullet under Edge Cases / Error Handling (or create that subsection if the template provides a placeholder for it).
     - Terminology conflict → Normalize the term across the spec; retain the original only if necessary by adding `(formerly referred to as "X")` once.
   - If the clarification invalidates an earlier ambiguous statement, replace that statement instead of duplicating it; leave no obsolete contradictory text.
   - Save the spec file AFTER each integration to minimize the risk of context loss (atomic overwrite).
   - Preserve formatting: do not reorder unrelated sections; keep the heading hierarchy intact.
   - Keep each inserted clarification minimal and testable (avoid narrative drift).

6. Validation (performed after EACH write plus a final pass):
   - The Clarifications session contains exactly one bullet per accepted answer (no duplicates).
   - Total asked (accepted) questions ≤ 5.
   - Updated sections contain no lingering vague placeholders the new answer was meant to resolve.
   - No contradictory earlier statement remains (scan for now-invalid alternative choices and remove them).
   - Markdown structure is valid; the only allowed new headings are `## Clarifications` and `### Session YYYY-MM-DD`.
   - Terminology consistency: the same canonical term is used across all updated sections.

7. Write the updated spec back to `FEATURE_SPEC`.

8. Report completion (after the questioning loop ends or terminates early):
   - Number of questions asked & answered.
   - Path to the updated spec.
   - Sections touched (list names).
   - Coverage summary table listing each taxonomy category with Status: Resolved (was Partial/Missing and addressed), Deferred (exceeds question quota or better suited for planning), Clear (already sufficient), Outstanding (still Partial/Missing but low impact).
   - If any Outstanding or Deferred remain, recommend whether to proceed to `/speckit.plan` or run `/speckit.clarify` again later post-plan.
   - Suggested next command.

Behavior rules:

- If no meaningful ambiguities are found (or all potential questions would be low-impact), respond: "No critical ambiguities detected worth formal clarification." and suggest proceeding.
- If the spec file is missing, instruct the user to run `/speckit.specify` first (do not create a new spec here).
- Never exceed 5 total asked questions (clarification retries for a single question do not count as new questions).
- Avoid speculative tech stack questions unless their absence blocks functional clarity.
- Respect user early-termination signals ("stop", "done", "proceed").
- If no questions were asked due to full coverage, output a compact coverage summary (all categories Clear) then suggest advancing.
- If the quota is reached with unresolved high-impact categories remaining, explicitly flag them under Deferred with a rationale.

Context for prioritization: {{args}}
"""
`.gemini/commands/speckit.constitution.toml` (new file, 86 lines)

@@ -0,0 +1,86 @@
description = "Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync."

prompt = """
---
description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync.
handoffs:
  - label: Build Specification
    agent: speckit.specify
    prompt: Implement the feature specification based on the updated constitution. I want to build...
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.

Follow this execution flow:

1. Load the existing constitution template at `.specify/memory/constitution.md`.
   - Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`.
   **IMPORTANT**: The user might require fewer or more principles than the template uses. If a number is specified, respect it - follow the general template. You will update the doc accordingly.

2. Collect/derive values for placeholders:
   - If the user input (conversation) supplies a value, use it.
   - Otherwise infer from existing repo context (README, docs, prior constitution versions if embedded).
   - For governance dates: `RATIFICATION_DATE` is the original adoption date (if unknown, ask or mark TODO); `LAST_AMENDED_DATE` is today if changes are made, otherwise keep the previous value.
   - `CONSTITUTION_VERSION` must increment according to semantic versioning rules:
     - MAJOR: Backward-incompatible governance/principle removals or redefinitions.
     - MINOR: New principle/section added or materially expanded guidance.
     - PATCH: Clarifications, wording, typo fixes, non-semantic refinements.
   - If the version bump type is ambiguous, propose your reasoning before finalizing.
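The bump rules above map onto a small decision function; a sketch (an illustration, not part of the command prompt; the change taxonomy is invented):

```ts
// Hypothetical mapping of change kinds to the semantic-version bump rules above.
type Change =
  | "principle-removed"
  | "principle-redefined"
  | "principle-added"
  | "section-expanded"
  | "wording-fix";

function bumpType(changes: Change[]): "MAJOR" | "MINOR" | "PATCH" {
  if (changes.some((c) => c === "principle-removed" || c === "principle-redefined")) return "MAJOR";
  if (changes.some((c) => c === "principle-added" || c === "section-expanded")) return "MINOR";
  return "PATCH"; // clarifications, typos, non-semantic refinements
}
```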
3. Draft the updated constitution content:
   - Replace every placeholder with concrete text (no bracketed tokens left except intentionally retained template slots that the project has chosen not to define yet; explicitly justify any that remain).
   - Preserve the heading hierarchy; comments can be removed once replaced unless they still add clarifying guidance.
   - Ensure each Principle section has: a succinct name line, a paragraph (or bullet list) capturing non-negotiable rules, and an explicit rationale if not obvious.
   - Ensure the Governance section lists the amendment procedure, versioning policy, and compliance review expectations.

4. Consistency propagation checklist (convert the prior checklist into active validations):
   - Read `.specify/templates/plan-template.md` and ensure any "Constitution Check" or rules align with the updated principles.
   - Read `.specify/templates/spec-template.md` for scope/requirements alignment; update it if the constitution adds/removes mandatory sections or constraints.
   - Read `.specify/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline).
   - Read each command file in `.specify/templates/commands/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required.
   - Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to changed principles.

5. Produce a Sync Impact Report (prepend as an HTML comment at the top of the constitution file after the update):
   - Version change: old → new
   - List of modified principles (old title → new title if renamed)
   - Added sections
   - Removed sections
   - Templates requiring updates (✅ updated / ⚠ pending) with file paths
   - Follow-up TODOs if any placeholders were intentionally deferred.

6. Validation before final output:
   - No remaining unexplained bracket tokens.
   - The version line matches the report.
   - Dates are in ISO format YYYY-MM-DD.
   - Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD plus rationale where appropriate).

7. Write the completed constitution back to `.specify/memory/constitution.md` (overwrite).

8. Output a final summary to the user with:
   - New version and bump rationale.
   - Any files flagged for manual follow-up.
   - Suggested commit message (e.g., `docs: amend constitution to vX.Y.Z (principle additions + governance update)`).

Formatting & Style Requirements:

- Use Markdown headings exactly as in the template (do not demote/promote levels).
- Wrap long rationale lines for readability (<100 chars ideally) but do not hard-enforce this with awkward breaks.
- Keep a single blank line between sections.
- Avoid trailing whitespace.

If the user supplies partial updates (e.g., only one principle revision), still perform the validation and version decision steps.

If critical info is missing (e.g., the ratification date is truly unknown), insert `TODO(<FIELD_NAME>): explanation` and include it in the Sync Impact Report under deferred items.

Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file.
"""
`.gemini/commands/speckit.implement.toml` (new file, 139 lines)

@@ -0,0 +1,139 @@
description = "Execute the implementation plan by processing and executing all tasks defined in tasks.md"

prompt = """
---
description: Execute the implementation plan by processing and executing all tasks defined in tasks.md
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

1. Run `.specify/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` from repo root and parse FEATURE_DIR and the AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").

2. **Check checklists status** (if FEATURE_DIR/checklists/ exists):
   - Scan all checklist files in the checklists/ directory
   - For each checklist, count:
     - Total items: all lines matching `- [ ]`, `- [X]`, or `- [x]`
     - Completed items: lines matching `- [X]` or `- [x]`
     - Incomplete items: lines matching `- [ ]`
   - Create a status table:

   ```text
   | Checklist   | Total | Completed | Incomplete | Status |
   |-------------|-------|-----------|------------|--------|
   | ux.md       | 12    | 12        | 0          | ✓ PASS |
   | test.md     | 8     | 5         | 3          | ✗ FAIL |
   | security.md | 6     | 6         | 0          | ✓ PASS |
   ```

   - Calculate the overall status:
     - **PASS**: All checklists have 0 incomplete items
     - **FAIL**: One or more checklists have incomplete items

   - **If any checklist is incomplete**:
     - Display the table with incomplete item counts
     - **STOP** and ask: "Some checklists are incomplete. Do you want to proceed with implementation anyway? (yes/no)"
     - Wait for the user response before continuing
     - If the user says "no", "wait", or "stop", halt execution
     - If the user says "yes", "proceed", or "continue", proceed to step 3

   - **If all checklists are complete**:
     - Display the table showing all checklists passed
     - Automatically proceed to step 3
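Step 2's counting rule is mechanical enough to show as code; a sketch (an illustration, not part of the command prompt; the helper is invented, and the regexes mirror the markers above):

```ts
// Hypothetical counter behind step 2's checklist status table.
import * as fs from "fs";

function checklistStatus(file: string) {
  const lines = fs.readFileSync(file, "utf8").split("\n");
  const total = lines.filter((l) => /^- \[( |x|X)\]/.test(l.trim())).length;
  const completed = lines.filter((l) => /^- \[(x|X)\]/.test(l.trim())).length;
  const incomplete = total - completed;
  // PASS only when every item is checked off.
  return { total, completed, incomplete, status: incomplete === 0 ? "PASS" : "FAIL" };
}
```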
3. Load and analyze the implementation context:
|
||||
- **REQUIRED**: Read tasks.md for the complete task list and execution plan
|
||||
- **REQUIRED**: Read plan.md for tech stack, architecture, and file structure
|
||||
- **IF EXISTS**: Read data-model.md for entities and relationships
|
||||
- **IF EXISTS**: Read contracts/ for API specifications and test requirements
|
||||
- **IF EXISTS**: Read research.md for technical decisions and constraints
|
||||
- **IF EXISTS**: Read quickstart.md for integration scenarios
|
||||
|
||||
4. **Project Setup Verification**:
|
||||
- **REQUIRED**: Create/verify ignore files based on actual project setup:
|
||||
|
||||
**Detection & Creation Logic**:
|
||||
- Check if the following command succeeds to determine if the repository is a git repo (create/verify .gitignore if so):
|
||||
|
||||
```sh
|
||||
git rev-parse --git-dir 2>/dev/null
|
||||
```
|
||||
|
||||
- Check if Dockerfile* exists or Docker in plan.md → create/verify .dockerignore
|
||||
- Check if .eslintrc* exists → create/verify .eslintignore
|
||||
- Check if eslint.config.* exists → ensure the config's `ignores` entries cover required patterns
|
||||
- Check if .prettierrc* exists → create/verify .prettierignore
|
||||
- Check if .npmrc or package.json exists → create/verify .npmignore (if publishing)
|
||||
- Check if terraform files (*.tf) exist → create/verify .terraformignore
|
||||
- Check if .helmignore needed (helm charts present) → create/verify .helmignore
|
||||
|
||||
**If ignore file already exists**: Verify it contains essential patterns, append missing critical patterns only
|
||||
**If ignore file missing**: Create with full pattern set for detected technology
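
   A minimal detect-and-append sketch (PowerShell, illustrative only; the pattern set shown is an assumed Node-style example, not a canonical list):

   ```powershell
   # Hypothetical sketch: if this is a git repo, append any missing .gitignore patterns
   git rev-parse --git-dir 2>$null | Out-Null
   if ($LASTEXITCODE -eq 0) {
       $required = 'node_modules/', 'dist/', '*.log', '.env*'   # assumed pattern set
       $existing = if (Test-Path .gitignore) { Get-Content .gitignore } else { @() }
       $missing  = $required | Where-Object { $existing -notcontains $_ }
       if ($missing) { Add-Content .gitignore $missing }
   }
   ```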

   **Common Patterns by Technology** (from plan.md tech stack):
   - **Node.js/JavaScript/TypeScript**: `node_modules/`, `dist/`, `build/`, `*.log`, `.env*`
   - **Python**: `__pycache__/`, `*.pyc`, `.venv/`, `venv/`, `dist/`, `*.egg-info/`
   - **Java**: `target/`, `*.class`, `*.jar`, `.gradle/`, `build/`
   - **C#/.NET**: `bin/`, `obj/`, `*.user`, `*.suo`, `packages/`
   - **Go**: `*.exe`, `*.test`, `vendor/`, `*.out`
   - **Ruby**: `.bundle/`, `log/`, `tmp/`, `*.gem`, `vendor/bundle/`
   - **PHP**: `vendor/`, `*.log`, `*.cache`, `*.env`
   - **Rust**: `target/`, `debug/`, `release/`, `*.rs.bk`, `*.rlib`, `*.prof*`, `.idea/`, `*.log`, `.env*`
   - **Kotlin**: `build/`, `out/`, `.gradle/`, `.idea/`, `*.class`, `*.jar`, `*.iml`, `*.log`, `.env*`
   - **C++**: `build/`, `bin/`, `obj/`, `out/`, `*.o`, `*.so`, `*.a`, `*.exe`, `*.dll`, `.idea/`, `*.log`, `.env*`
   - **C**: `build/`, `bin/`, `obj/`, `out/`, `*.o`, `*.a`, `*.so`, `*.exe`, `Makefile`, `config.log`, `.idea/`, `*.log`, `.env*`
   - **Swift**: `.build/`, `DerivedData/`, `*.swiftpm/`, `Packages/`
   - **R**: `.Rproj.user/`, `.Rhistory`, `.RData`, `.Ruserdata`, `*.Rproj`, `packrat/`, `renv/`
   - **Universal**: `.DS_Store`, `Thumbs.db`, `*.tmp`, `*.swp`, `.vscode/`, `.idea/`

   **Tool-Specific Patterns**:
   - **Docker**: `node_modules/`, `.git/`, `Dockerfile*`, `.dockerignore`, `*.log*`, `.env*`, `coverage/`
   - **ESLint**: `node_modules/`, `dist/`, `build/`, `coverage/`, `*.min.js`
   - **Prettier**: `node_modules/`, `dist/`, `build/`, `coverage/`, `package-lock.json`, `yarn.lock`, `pnpm-lock.yaml`
   - **Terraform**: `.terraform/`, `*.tfstate*`, `*.tfvars`, `.terraform.lock.hcl`
   - **Kubernetes/k8s**: `*.secret.yaml`, `secrets/`, `.kube/`, `kubeconfig*`, `*.key`, `*.crt`

5. Parse tasks.md structure and extract:
   - **Task phases**: Setup, Tests, Core, Integration, Polish
   - **Task dependencies**: Sequential vs parallel execution rules
   - **Task details**: ID, description, file paths, parallel markers [P]
   - **Execution flow**: Order and dependency requirements

6. Execute implementation following the task plan:
   - **Phase-by-phase execution**: Complete each phase before moving to the next
   - **Respect dependencies**: Run sequential tasks in order; parallel tasks [P] can run together
   - **Follow TDD approach**: Execute test tasks before their corresponding implementation tasks
   - **File-based coordination**: Tasks affecting the same files must run sequentially
   - **Validation checkpoints**: Verify each phase completion before proceeding

7. Implementation execution rules:
   - **Setup first**: Initialize project structure, dependencies, configuration
   - **Tests before code**: If tests are requested, write them for contracts, entities, and integration scenarios before implementing
   - **Core development**: Implement models, services, CLI commands, endpoints
   - **Integration work**: Database connections, middleware, logging, external services
   - **Polish and validation**: Unit tests, performance optimization, documentation

8. Progress tracking and error handling:
   - Report progress after each completed task
   - Halt execution if any non-parallel task fails
   - For parallel tasks [P], continue with successful tasks, report failed ones
   - Provide clear error messages with context for debugging
   - Suggest next steps if implementation cannot proceed
   - **IMPORTANT**: For completed tasks, make sure to mark the task off as [X] in the tasks file.

9. Completion validation:
   - Verify all required tasks are completed
   - Check that implemented features match the original specification
   - Validate that tests pass and coverage meets requirements
   - Confirm the implementation follows the technical plan
   - Report final status with summary of completed work

Note: This command assumes a complete task breakdown exists in tasks.md. If tasks are incomplete or missing, suggest running `/speckit.tasks` first to regenerate the task list.
"""
93 .gemini/commands/speckit.plan.toml Normal file
@@ -0,0 +1,93 @@
description = "Execute the implementation planning workflow using the plan template to generate design artifacts."

prompt = """
---
description: Execute the implementation planning workflow using the plan template to generate design artifacts.
handoffs:
  - label: Create Tasks
    agent: speckit.tasks
    prompt: Break the plan into tasks
    send: true
  - label: Create Checklist
    agent: speckit.checklist
    prompt: Create a checklist for the following domain...
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

1. **Setup**: Run `.specify/scripts/powershell/setup-plan.ps1 -Json` from repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").

2. **Load context**: Read FEATURE_SPEC and `.specify/memory/constitution.md`. Load IMPL_PLAN template (already copied).

3. **Execute plan workflow**: Follow the structure in IMPL_PLAN template to:
   - Fill Technical Context (mark unknowns as "NEEDS CLARIFICATION")
   - Fill Constitution Check section from constitution
   - Evaluate gates (ERROR if violations unjustified)
   - Phase 0: Generate research.md (resolve all NEEDS CLARIFICATION)
   - Phase 1: Generate data-model.md, contracts/, quickstart.md
   - Phase 1: Update agent context by running the agent script
   - Re-evaluate Constitution Check post-design

4. **Stop and report**: Command ends after Phase 2 planning. Report branch, IMPL_PLAN path, and generated artifacts.

## Phases

### Phase 0: Outline & Research

1. **Extract unknowns from Technical Context** above:
   - For each NEEDS CLARIFICATION → research task
   - For each dependency → best practices task
   - For each integration → patterns task

2. **Generate and dispatch research agents**:

   ```text
   For each unknown in Technical Context:
     Task: "Research {unknown} for {feature context}"
   For each technology choice:
     Task: "Find best practices for {tech} in {domain}"
   ```

3. **Consolidate findings** in `research.md` using format:
   - Decision: [what was chosen]
   - Rationale: [why chosen]
   - Alternatives considered: [what else evaluated]

**Output**: research.md with all NEEDS CLARIFICATION resolved

### Phase 1: Design & Contracts

**Prerequisites:** `research.md` complete

1. **Extract entities from feature spec** → `data-model.md`:
   - Entity name, fields, relationships
   - Validation rules from requirements
   - State transitions if applicable

2. **Generate API contracts** from functional requirements:
   - For each user action → endpoint
   - Use standard REST/GraphQL patterns
   - Output OpenAPI/GraphQL schema to `/contracts/`

3. **Agent context update**:
   - Run `.specify/scripts/powershell/update-agent-context.ps1 -AgentType gemini`
   - These scripts detect which AI agent is in use
   - Update the appropriate agent-specific context file
   - Add only new technology from current plan
   - Preserve manual additions between markers

**Output**: data-model.md, /contracts/*, quickstart.md, agent-specific file

## Key rules

- Use absolute paths
- ERROR on gate failures or unresolved clarifications
"""
261 .gemini/commands/speckit.specify.toml Normal file
@@ -0,0 +1,261 @@
description = "Create or update the feature specification from a natural language feature description."

prompt = """
---
description: Create or update the feature specification from a natural language feature description.
handoffs:
  - label: Build Technical Plan
    agent: speckit.plan
    prompt: Create a plan for the spec. I am building with...
  - label: Clarify Spec Requirements
    agent: speckit.clarify
    prompt: Clarify specification requirements
    send: true
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

The text the user typed after `/speckit.specify` in the triggering message **is** the feature description. Assume you always have it available in this conversation even if `{{args}}` appears literally below. Do not ask the user to repeat it unless they provided an empty command.

Given that feature description, do this:

1. **Generate a concise short name** (2-4 words) for the branch:
   - Analyze the feature description and extract the most meaningful keywords
   - Create a 2-4 word short name that captures the essence of the feature
   - Use action-noun format when possible (e.g., "add-user-auth", "fix-payment-bug")
   - Preserve technical terms and acronyms (OAuth2, API, JWT, etc.)
   - Keep it concise but descriptive enough to understand the feature at a glance
   - Examples:
     - "I want to add user authentication" → "user-auth"
     - "Implement OAuth2 integration for the API" → "oauth2-api-integration"
     - "Create a dashboard for analytics" → "analytics-dashboard"
     - "Fix payment processing timeout bug" → "fix-payment-timeout"

2. **Check for existing branches before creating new one**:

   a. First, fetch all remote branches to ensure we have the latest information:

      ```bash
      git fetch --all --prune
      ```

   b. Find the highest feature number across all sources for the short-name (a combined scan sketch follows step c):
      - Remote branches: `git ls-remote --heads origin | grep -E 'refs/heads/[0-9]+-<short-name>$'`
      - Local branches: `git branch | grep -E '^[* ]*[0-9]+-<short-name>$'`
      - Specs directories: Check for directories matching `specs/[0-9]+-<short-name>`

   c. Determine the next available number:
      - Extract all numbers from all three sources
      - Find the highest number N
      - Use N+1 for the new branch number
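
   A combined scan sketch (PowerShell, illustrative only; `$short` is an assumed placeholder for the generated short name):

   ```powershell
   # Hypothetical sketch: highest existing feature number across remote, local, and specs/
   $short   = 'user-auth'
   $numbers = @()
   $numbers += (git ls-remote --heads origin 2>$null) | ForEach-Object {
       if ($_ -match "refs/heads/(\d+)-$short$") { [int]$Matches[1] } }
   $numbers += (git branch 2>$null) | ForEach-Object {
       if ($_ -match "^\*?\s*(\d+)-$short$") { [int]$Matches[1] } }
   $numbers += Get-ChildItem specs -Directory -ErrorAction SilentlyContinue | ForEach-Object {
       if ($_.Name -match "^(\d+)-$short$") { [int]$Matches[1] } }
   $next = ($numbers | Measure-Object -Maximum).Maximum + 1   # evaluates to 1 when nothing matched
   ```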

   d. Run the script `.specify/scripts/powershell/create-new-feature.ps1` with the calculated number, short-name, and feature description:
      - Pass `-Number N+1` and `-ShortName "your-short-name"` along with the feature description
      - Example: `.specify/scripts/powershell/create-new-feature.ps1 -Json -Number 5 -ShortName "user-auth" "Add user authentication"`

   **IMPORTANT**:
   - Check all three sources (remote branches, local branches, specs directories) to find the highest number
   - Only match branches/directories with the exact short-name pattern
   - If no existing branches/directories are found with this short-name, start with number 1
   - You must only ever run this script once per feature
   - The JSON is provided in the terminal as output - always refer to it to get the actual content you're looking for
   - The JSON output will contain BRANCH_NAME and SPEC_FILE paths
   - For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot")

3. Load `.specify/templates/spec-template.md` to understand required sections.

4. Follow this execution flow:

   1. Parse user description from Input
      If empty: ERROR "No feature description provided"
   2. Extract key concepts from description
      Identify: actors, actions, data, constraints
   3. For unclear aspects:
      - Make informed guesses based on context and industry standards
      - Only mark with [NEEDS CLARIFICATION: specific question] if:
        - The choice significantly impacts feature scope or user experience
        - Multiple reasonable interpretations exist with different implications
        - No reasonable default exists
      - **LIMIT: Maximum 3 [NEEDS CLARIFICATION] markers total**
      - Prioritize clarifications by impact: scope > security/privacy > user experience > technical details
   4. Fill User Scenarios & Testing section
      If no clear user flow: ERROR "Cannot determine user scenarios"
   5. Generate Functional Requirements
      Each requirement must be testable
      Use reasonable defaults for unspecified details (document assumptions in Assumptions section)
   6. Define Success Criteria
      Create measurable, technology-agnostic outcomes
      Include both quantitative metrics (time, performance, volume) and qualitative measures (user satisfaction, task completion)
      Each criterion must be verifiable without implementation details
   7. Identify Key Entities (if data involved)
   8. Return: SUCCESS (spec ready for planning)

5. Write the specification to SPEC_FILE using the template structure, replacing placeholders with concrete details derived from the feature description (arguments) while preserving section order and headings.

6. **Specification Quality Validation**: After writing the initial spec, validate it against quality criteria:

   a. **Create Spec Quality Checklist**: Generate a checklist file at `FEATURE_DIR/checklists/requirements.md` using the checklist template structure with these validation items:

      ```markdown
      # Specification Quality Checklist: [FEATURE NAME]

      **Purpose**: Validate specification completeness and quality before proceeding to planning
      **Created**: [DATE]
      **Feature**: [Link to spec.md]

      ## Content Quality

      - [ ] No implementation details (languages, frameworks, APIs)
      - [ ] Focused on user value and business needs
      - [ ] Written for non-technical stakeholders
      - [ ] All mandatory sections completed

      ## Requirement Completeness

      - [ ] No [NEEDS CLARIFICATION] markers remain
      - [ ] Requirements are testable and unambiguous
      - [ ] Success criteria are measurable
      - [ ] Success criteria are technology-agnostic (no implementation details)
      - [ ] All acceptance scenarios are defined
      - [ ] Edge cases are identified
      - [ ] Scope is clearly bounded
      - [ ] Dependencies and assumptions identified

      ## Feature Readiness

      - [ ] All functional requirements have clear acceptance criteria
      - [ ] User scenarios cover primary flows
      - [ ] Feature meets measurable outcomes defined in Success Criteria
      - [ ] No implementation details leak into specification

      ## Notes

      - Items marked incomplete require spec updates before `/speckit.clarify` or `/speckit.plan`
      ```

   b. **Run Validation Check**: Review the spec against each checklist item:
      - For each item, determine if it passes or fails
      - Document specific issues found (quote relevant spec sections)

   c. **Handle Validation Results**:

      - **If all items pass**: Mark checklist complete and proceed to step 7

      - **If items fail (excluding [NEEDS CLARIFICATION])**:
        1. List the failing items and specific issues
        2. Update the spec to address each issue
        3. Re-run validation until all items pass (max 3 iterations)
        4. If still failing after 3 iterations, document remaining issues in checklist notes and warn the user

      - **If [NEEDS CLARIFICATION] markers remain**:
        1. Extract all [NEEDS CLARIFICATION: ...] markers from the spec
        2. **LIMIT CHECK**: If more than 3 markers exist, keep only the 3 most critical (by scope/security/UX impact) and make informed guesses for the rest
        3. For each clarification needed (max 3), present options to the user in this format:

           ```markdown
           ## Question [N]: [Topic]

           **Context**: [Quote relevant spec section]

           **What we need to know**: [Specific question from NEEDS CLARIFICATION marker]

           **Suggested Answers**:

           | Option | Answer | Implications |
           |--------|--------|--------------|
           | A | [First suggested answer] | [What this means for the feature] |
           | B | [Second suggested answer] | [What this means for the feature] |
           | C | [Third suggested answer] | [What this means for the feature] |
           | Custom | Provide your own answer | [Explain how to provide custom input] |

           **Your choice**: _[Wait for user response]_
           ```

        4. **CRITICAL - Table Formatting**: Ensure markdown tables are properly formatted:
           - Use consistent spacing with pipes aligned
           - Each cell should have spaces around content: `| Content |` not `|Content|`
           - Header separator must have at least 3 dashes: `|--------|`
           - Test that the table renders correctly in markdown preview
        5. Number questions sequentially (Q1, Q2, Q3 - max 3 total)
        6. Present all questions together before waiting for responses
        7. Wait for the user to respond with their choices for all questions (e.g., "Q1: A, Q2: Custom - [details], Q3: B")
        8. Update the spec by replacing each [NEEDS CLARIFICATION] marker with the user's selected or provided answer
        9. Re-run validation after all clarifications are resolved

   d. **Update Checklist**: After each validation iteration, update the checklist file with current pass/fail status

7. Report completion with branch name, spec file path, checklist results, and readiness for the next phase (`/speckit.clarify` or `/speckit.plan`).

**NOTE:** The script creates and checks out the new branch and initializes the spec file before writing.

## General Guidelines

- Focus on **WHAT** users need and **WHY**.
- Avoid HOW to implement (no tech stack, APIs, code structure).
- Write for business stakeholders, not developers.
- DO NOT create any checklists that are embedded in the spec. That will be a separate command.

### Section Requirements

- **Mandatory sections**: Must be completed for every feature
- **Optional sections**: Include only when relevant to the feature
- When a section doesn't apply, remove it entirely (don't leave as "N/A")

### For AI Generation

When creating this spec from a user prompt:

1. **Make informed guesses**: Use context, industry standards, and common patterns to fill gaps
2. **Document assumptions**: Record reasonable defaults in the Assumptions section
3. **Limit clarifications**: Maximum 3 [NEEDS CLARIFICATION] markers - use only for critical decisions that:
   - Significantly impact feature scope or user experience
   - Have multiple reasonable interpretations with different implications
   - Lack any reasonable default
4. **Prioritize clarifications**: scope > security/privacy > user experience > technical details
5. **Think like a tester**: Every vague requirement should fail the "testable and unambiguous" checklist item
6. **Common areas needing clarification** (only if no reasonable default exists):
   - Feature scope and boundaries (include/exclude specific use cases)
   - User types and permissions (if multiple conflicting interpretations possible)
   - Security/compliance requirements (when legally/financially significant)

**Examples of reasonable defaults** (don't ask about these):

- Data retention: Industry-standard practices for the domain
- Performance targets: Standard web/mobile app expectations unless specified
- Error handling: User-friendly messages with appropriate fallbacks
- Authentication method: Standard session-based or OAuth2 for web apps
- Integration patterns: RESTful APIs unless specified otherwise

### Success Criteria Guidelines

Success criteria must be:

1. **Measurable**: Include specific metrics (time, percentage, count, rate)
2. **Technology-agnostic**: No mention of frameworks, languages, databases, or tools
3. **User-focused**: Describe outcomes from the user/business perspective, not system internals
4. **Verifiable**: Can be tested/validated without knowing implementation details

**Good examples**:

- "Users can complete checkout in under 3 minutes"
- "System supports 10,000 concurrent users"
- "95% of searches return results in under 1 second"
- "Task completion rate improves by 40%"

**Bad examples** (implementation-focused):

- "API response time is under 200ms" (too technical; use "Users see results instantly")
- "Database can handle 1000 TPS" (implementation detail; use a user-facing metric)
- "React components render efficiently" (framework-specific)
- "Redis cache hit rate above 80%" (technology-specific)
"""
141 .gemini/commands/speckit.tasks.toml Normal file
@@ -0,0 +1,141 @@
description = "Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts."

prompt = """
---
description: Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts.
handoffs:
  - label: Analyze For Consistency
    agent: speckit.analyze
    prompt: Run a project analysis for consistency
    send: true
  - label: Implement Project
    agent: speckit.implement
    prompt: Start the implementation in phases
    send: true
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

1. **Setup**: Run `.specify/scripts/powershell/check-prerequisites.ps1 -Json` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").

2. **Load design documents**: Read from FEATURE_DIR:
   - **Required**: plan.md (tech stack, libraries, structure), spec.md (user stories with priorities)
   - **Optional**: data-model.md (entities), contracts/ (API endpoints), research.md (decisions), quickstart.md (test scenarios)
   - Note: Not all projects have all documents. Generate tasks based on what's available.

3. **Execute task generation workflow**:
   - Load plan.md and extract tech stack, libraries, project structure
   - Load spec.md and extract user stories with their priorities (P1, P2, P3, etc.)
   - If data-model.md exists: Extract entities and map to user stories
   - If contracts/ exists: Map endpoints to user stories
   - If research.md exists: Extract decisions for setup tasks
   - Generate tasks organized by user story (see Task Generation Rules below)
   - Generate dependency graph showing user story completion order
   - Create parallel execution examples per user story
   - Validate task completeness (each user story has all needed tasks, independently testable)

4. **Generate tasks.md**: Use `.specify/templates/tasks-template.md` as structure, fill with:
   - Correct feature name from plan.md
   - Phase 1: Setup tasks (project initialization)
   - Phase 2: Foundational tasks (blocking prerequisites for all user stories)
   - Phase 3+: One phase per user story (in priority order from spec.md)
   - Each phase includes: story goal, independent test criteria, tests (if requested), implementation tasks
   - Final Phase: Polish & cross-cutting concerns
   - All tasks must follow the strict checklist format (see Task Generation Rules below)
   - Clear file paths for each task
   - Dependencies section showing story completion order
   - Parallel execution examples per story
   - Implementation strategy section (MVP first, incremental delivery)

5. **Report**: Output path to generated tasks.md and summary:
   - Total task count
   - Task count per user story
   - Parallel opportunities identified
   - Independent test criteria for each story
   - Suggested MVP scope (typically just User Story 1)
   - Format validation: Confirm ALL tasks follow the checklist format (checkbox, ID, labels, file paths)

Context for task generation: {{args}}

The tasks.md should be immediately executable - each task must be specific enough that an LLM can complete it without additional context.

## Task Generation Rules

**CRITICAL**: Tasks MUST be organized by user story to enable independent implementation and testing.

**Tests are OPTIONAL**: Only generate test tasks if explicitly requested in the feature specification or if the user requests a TDD approach.

### Checklist Format (REQUIRED)

Every task MUST strictly follow this format:

```text
- [ ] [TaskID] [P?] [Story?] Description with file path
```

**Format Components**:

1. **Checkbox**: ALWAYS start with `- [ ]` (markdown checkbox)
2. **Task ID**: Sequential number (T001, T002, T003...) in execution order
3. **[P] marker**: Include ONLY if task is parallelizable (different files, no dependencies on incomplete tasks)
4. **[Story] label**: REQUIRED for user story phase tasks only
   - Format: [US1], [US2], [US3], etc. (maps to user stories from spec.md)
   - Setup phase: NO story label
   - Foundational phase: NO story label
   - User Story phases: MUST have story label
   - Polish phase: NO story label
5. **Description**: Clear action with exact file path

**Examples** (a format-check sketch follows this list):

- ✅ CORRECT: `- [ ] T001 Create project structure per implementation plan`
- ✅ CORRECT: `- [ ] T005 [P] Implement authentication middleware in src/middleware/auth.py`
- ✅ CORRECT: `- [ ] T012 [P] [US1] Create User model in src/models/user.py`
- ✅ CORRECT: `- [ ] T014 [US1] Implement UserService in src/services/user_service.py`
- ❌ WRONG: `- [ ] Create User model` (missing ID and Story label)
- ❌ WRONG: `T001 [US1] Create model` (missing checkbox)
- ❌ WRONG: `- [ ] [US1] Create User model` (missing Task ID)
- ❌ WRONG: `- [ ] T001 [US1] Create model` (missing file path)
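
A minimal format-check sketch (PowerShell, illustrative only; `$FeatureDir` is an assumed placeholder, and the regex enforces checkbox, ID, and optional labels but cannot verify that a file path is present):

```powershell
# Hypothetical sketch: flag task lines that break the required checklist format
# (freshly generated tasks are unchecked, so only `- [ ]` is expected)
$pattern = '^- \[ \] T\d{3}( \[P\])?( \[US\d+\])? .+'
Get-Content "$FeatureDir/tasks.md" |
    Where-Object { $_ -match '^- \[' -or $_ -match '^T\d{3}' } |
    ForEach-Object {
        if ($_ -notmatch $pattern) { Write-Warning "Malformed task line: $_" }
    }
```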

### Task Organization

1. **From User Stories (spec.md)** - PRIMARY ORGANIZATION:
   - Each user story (P1, P2, P3...) gets its own phase
   - Map all related components to their story:
     - Models needed for that story
     - Services needed for that story
     - Endpoints/UI needed for that story
     - If tests requested: Tests specific to that story
   - Mark story dependencies (most stories should be independent)

2. **From Contracts**:
   - Map each contract/endpoint to the user story it serves
   - If tests requested: Each contract → contract test task [P] before implementation in that story's phase

3. **From Data Model**:
   - Map each entity to the user story(ies) that need it
   - If an entity serves multiple stories: Put it in the earliest story or the Setup phase
   - Relationships → service layer tasks in the appropriate story phase

4. **From Setup/Infrastructure**:
   - Shared infrastructure → Setup phase (Phase 1)
   - Foundational/blocking tasks → Foundational phase (Phase 2)
   - Story-specific setup → within that story's phase

### Phase Structure

- **Phase 1**: Setup (project initialization)
- **Phase 2**: Foundational (blocking prerequisites - MUST complete before user stories)
- **Phase 3+**: User Stories in priority order (P1, P2, P3...)
  - Within each story: Tests (if requested) → Models → Services → Endpoints → Integration
  - Each phase should be a complete, independently testable increment
- **Final Phase**: Polish & Cross-Cutting Concerns
"""
32 .gemini/commands/speckit.taskstoissues.toml Normal file
@@ -0,0 +1,32 @@
description = "Convert existing tasks into actionable, dependency-ordered GitHub issues for the feature based on available design artifacts."

prompt = """
---
description: Convert existing tasks into actionable, dependency-ordered GitHub issues for the feature based on available design artifacts.
tools: ['github/github-mcp-server/issue_write']
---

## User Input

```text
$ARGUMENTS
```

You **MUST** consider the user input before proceeding (if not empty).

## Outline

1. Run `.specify/scripts/powershell/check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute. For single quotes in args like "I'm Groot", use escape syntax: e.g. 'I'\\''m Groot' (or double-quote if possible: "I'm Groot").
2. From the executed script, extract the path to **tasks**.
3. Get the Git remote by running:

   ```bash
   git config --get remote.origin.url
   ```

   **ONLY PROCEED TO NEXT STEPS IF THE REMOTE IS A GITHUB URL**

4. For each task in the list, use the GitHub MCP server to create a new issue in the repository that the Git remote points to.

**UNDER NO CIRCUMSTANCES EVER CREATE ISSUES IN REPOSITORIES THAT DO NOT MATCH THE REMOTE URL**
"""
15 .gemini/settings.json Normal file
@@ -0,0 +1,15 @@
{
  "general": {
    "previewFeatures": true,
    "enablePromptCompletion": true
  },
  "ide": {
    "enabled": true
  },
  "output": {
    "format": "json"
  },
  "ui": {
    "showStatusInTitle": true
  }
}
1 .github/PULL_REQUEST_TEMPLATE.md vendored Normal file
@@ -0,0 +1 @@
# Template for PRs
1 .github/workflows/auto-label.yml vendored Normal file
@@ -0,0 +1 @@
# Apply labels automatically
1 .github/workflows/link-checker.yml vendored Normal file
@@ -0,0 +1 @@
# Check for broken links
63 .github/workflows/spec-validation.yml vendored Normal file
@@ -0,0 +1,63 @@
name: Spec Validation

on:
  pull_request:
    paths:
      - 'specs/**'
      - 'diagrams/**'
  push:
    branches: [main]

jobs:
  validate-markdown:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      # 1. Check Markdown syntax
      - name: Lint Markdown
        uses: avto-dev/markdown-lint@v1
        with:
          config: '.markdownlint.json'
          args: 'specs/**/*.md'

      # 2. Check internal links
      - name: Check Links
        uses: gaurav-nelson/github-action-markdown-link-check@v1
        with:
          use-quiet-mode: 'yes'
          folder-path: 'specs'

      # 3. Validate required metadata
      - name: Validate Metadata
        run: |
          python scripts/validate-spec-metadata.py

      # 4. Check version consistency
      - name: Check Version Numbers
        run: |
          python scripts/check-versions.py

  validate-diagrams:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      # Verify that Mermaid diagrams render
      - name: Validate Mermaid
        uses: neenjaw/compile-mermaid-markdown-action@v1
        with:
          files: 'diagrams/**/*.mmd'

  check-todos:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      # Alert if any TODO/FIXME remain
      - name: Check for TODOs
        run: |
          if grep -r "TODO\|FIXME" specs/; then
            echo "⚠️ Found TODO/FIXME in specs!"
            exit 1
          fi
4 .gitignore vendored
@@ -46,7 +46,7 @@ build/Release
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

*.vsix
# ============================================
# Logs & Debug
# ============================================
@@ -350,4 +350,4 @@ vendor/bundle
# *.jar
# *.rar
# *.tar
# *.zip
4 .prettierignore Normal file
@@ -0,0 +1,4 @@
node_modules
dist
build
*.min.js
12 .prettierrc Normal file
@@ -0,0 +1,12 @@
{
  "singleQuote": true,
  "semi": true,
  "tabWidth": 2,
  "useTabs": false,
  "trailingComma": "es5",
  "printWidth": 120,
  "arrowParens": "always",
  "endOfLine": "lf",
  "bracketSpacing": true,
  "jsxSingleQuote": false
}
50 .specify/memory/constitution.md Normal file
@@ -0,0 +1,50 @@
# [PROJECT_NAME] Constitution
<!-- Example: Spec Constitution, TaskFlow Constitution, etc. -->

## Core Principles

### [PRINCIPLE_1_NAME]
<!-- Example: I. Library-First -->
[PRINCIPLE_1_DESCRIPTION]
<!-- Example: Every feature starts as a standalone library; Libraries must be self-contained, independently testable, documented; Clear purpose required - no organizational-only libraries -->

### [PRINCIPLE_2_NAME]
<!-- Example: II. CLI Interface -->
[PRINCIPLE_2_DESCRIPTION]
<!-- Example: Every library exposes functionality via CLI; Text in/out protocol: stdin/args → stdout, errors → stderr; Support JSON + human-readable formats -->

### [PRINCIPLE_3_NAME]
<!-- Example: III. Test-First (NON-NEGOTIABLE) -->
[PRINCIPLE_3_DESCRIPTION]
<!-- Example: TDD mandatory: Tests written → User approved → Tests fail → Then implement; Red-Green-Refactor cycle strictly enforced -->

### [PRINCIPLE_4_NAME]
<!-- Example: IV. Integration Testing -->
[PRINCIPLE_4_DESCRIPTION]
<!-- Example: Focus areas requiring integration tests: New library contract tests, Contract changes, Inter-service communication, Shared schemas -->

### [PRINCIPLE_5_NAME]
<!-- Example: V. Observability, VI. Versioning & Breaking Changes, VII. Simplicity -->
[PRINCIPLE_5_DESCRIPTION]
<!-- Example: Text I/O ensures debuggability; Structured logging required; Or: MAJOR.MINOR.BUILD format; Or: Start simple, YAGNI principles -->

## [SECTION_2_NAME]
<!-- Example: Additional Constraints, Security Requirements, Performance Standards, etc. -->

[SECTION_2_CONTENT]
<!-- Example: Technology stack requirements, compliance standards, deployment policies, etc. -->

## [SECTION_3_NAME]
<!-- Example: Development Workflow, Review Process, Quality Gates, etc. -->

[SECTION_3_CONTENT]
<!-- Example: Code review requirements, testing gates, deployment approval process, etc. -->

## Governance
<!-- Example: Constitution supersedes all other practices; Amendments require documentation, approval, migration plan -->

[GOVERNANCE_RULES]
<!-- Example: All PRs/reviews must verify compliance; Complexity must be justified; Use [GUIDANCE_FILE] for runtime development guidance -->

**Version**: [CONSTITUTION_VERSION] | **Ratified**: [RATIFICATION_DATE] | **Last Amended**: [LAST_AMENDED_DATE]
<!-- Example: Version: 2.1.1 | Ratified: 2025-06-13 | Last Amended: 2025-07-16 -->
148 .specify/scripts/powershell/check-prerequisites.ps1 Normal file
@@ -0,0 +1,148 @@
#!/usr/bin/env pwsh

# Consolidated prerequisite checking script (PowerShell)
#
# This script provides unified prerequisite checking for Spec-Driven Development workflow.
# It replaces the functionality previously spread across multiple scripts.
#
# Usage: ./check-prerequisites.ps1 [OPTIONS]
#
# OPTIONS:
#   -Json               Output in JSON format
#   -RequireTasks       Require tasks.md to exist (for implementation phase)
#   -IncludeTasks       Include tasks.md in AVAILABLE_DOCS list
#   -PathsOnly          Only output path variables (no validation)
#   -Help, -h           Show help message

[CmdletBinding()]
param(
    [switch]$Json,
    [switch]$RequireTasks,
    [switch]$IncludeTasks,
    [switch]$PathsOnly,
    [switch]$Help
)

$ErrorActionPreference = 'Stop'

# Show help if requested
if ($Help) {
    Write-Output @"
Usage: check-prerequisites.ps1 [OPTIONS]

Consolidated prerequisite checking for Spec-Driven Development workflow.

OPTIONS:
  -Json               Output in JSON format
  -RequireTasks       Require tasks.md to exist (for implementation phase)
  -IncludeTasks       Include tasks.md in AVAILABLE_DOCS list
  -PathsOnly          Only output path variables (no prerequisite validation)
  -Help, -h           Show this help message

EXAMPLES:
  # Check task prerequisites (plan.md required)
  .\check-prerequisites.ps1 -Json

  # Check implementation prerequisites (plan.md + tasks.md required)
  .\check-prerequisites.ps1 -Json -RequireTasks -IncludeTasks

  # Get feature paths only (no validation)
  .\check-prerequisites.ps1 -PathsOnly

"@
    exit 0
}

# Source common functions
. "$PSScriptRoot/common.ps1"

# Get feature paths and validate branch
$paths = Get-FeaturePathsEnv

if (-not (Test-FeatureBranch -Branch $paths.CURRENT_BRANCH -HasGit:$paths.HAS_GIT)) {
    exit 1
}

# If paths-only mode, output paths and exit (support combined -Json -PathsOnly)
if ($PathsOnly) {
    if ($Json) {
        [PSCustomObject]@{
            REPO_ROOT    = $paths.REPO_ROOT
            BRANCH       = $paths.CURRENT_BRANCH
            FEATURE_DIR  = $paths.FEATURE_DIR
            FEATURE_SPEC = $paths.FEATURE_SPEC
            IMPL_PLAN    = $paths.IMPL_PLAN
            TASKS        = $paths.TASKS
        } | ConvertTo-Json -Compress
    } else {
        Write-Output "REPO_ROOT: $($paths.REPO_ROOT)"
        Write-Output "BRANCH: $($paths.CURRENT_BRANCH)"
        Write-Output "FEATURE_DIR: $($paths.FEATURE_DIR)"
        Write-Output "FEATURE_SPEC: $($paths.FEATURE_SPEC)"
        Write-Output "IMPL_PLAN: $($paths.IMPL_PLAN)"
        Write-Output "TASKS: $($paths.TASKS)"
    }
    exit 0
}

# Validate required directories and files
if (-not (Test-Path $paths.FEATURE_DIR -PathType Container)) {
    Write-Output "ERROR: Feature directory not found: $($paths.FEATURE_DIR)"
    Write-Output "Run /speckit.specify first to create the feature structure."
    exit 1
}

if (-not (Test-Path $paths.IMPL_PLAN -PathType Leaf)) {
    Write-Output "ERROR: plan.md not found in $($paths.FEATURE_DIR)"
    Write-Output "Run /speckit.plan first to create the implementation plan."
    exit 1
}

# Check for tasks.md if required
if ($RequireTasks -and -not (Test-Path $paths.TASKS -PathType Leaf)) {
    Write-Output "ERROR: tasks.md not found in $($paths.FEATURE_DIR)"
    Write-Output "Run /speckit.tasks first to create the task list."
    exit 1
}

# Build list of available documents
$docs = @()

# Always check these optional docs
if (Test-Path $paths.RESEARCH) { $docs += 'research.md' }
if (Test-Path $paths.DATA_MODEL) { $docs += 'data-model.md' }

# Check contracts directory (only if it exists and has files)
if ((Test-Path $paths.CONTRACTS_DIR) -and (Get-ChildItem -Path $paths.CONTRACTS_DIR -ErrorAction SilentlyContinue | Select-Object -First 1)) {
    $docs += 'contracts/'
}

if (Test-Path $paths.QUICKSTART) { $docs += 'quickstart.md' }

# Include tasks.md if requested and it exists
if ($IncludeTasks -and (Test-Path $paths.TASKS)) {
    $docs += 'tasks.md'
}

# Output results
if ($Json) {
    # JSON output
    [PSCustomObject]@{
        FEATURE_DIR    = $paths.FEATURE_DIR
        AVAILABLE_DOCS = $docs
    } | ConvertTo-Json -Compress
} else {
    # Text output
    Write-Output "FEATURE_DIR:$($paths.FEATURE_DIR)"
    Write-Output "AVAILABLE_DOCS:"

    # Show status of each potential document
    Test-FileExists -Path $paths.RESEARCH -Description 'research.md' | Out-Null
    Test-FileExists -Path $paths.DATA_MODEL -Description 'data-model.md' | Out-Null
    Test-DirHasFiles -Path $paths.CONTRACTS_DIR -Description 'contracts/' | Out-Null
    Test-FileExists -Path $paths.QUICKSTART -Description 'quickstart.md' | Out-Null

    if ($IncludeTasks) {
        Test-FileExists -Path $paths.TASKS -Description 'tasks.md' | Out-Null
    }
}
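
# Example JSON shape with -Json -RequireTasks -IncludeTasks (illustrative values, not actual output):
#   {"FEATURE_DIR":"/repo/specs/001-user-auth","AVAILABLE_DOCS":["research.md","tasks.md"]}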

137 .specify/scripts/powershell/common.ps1 Normal file
@@ -0,0 +1,137 @@
#!/usr/bin/env pwsh
# Common PowerShell functions analogous to common.sh

function Get-RepoRoot {
    try {
        $result = git rev-parse --show-toplevel 2>$null
        if ($LASTEXITCODE -eq 0) {
            return $result
        }
    } catch {
        # Git command failed
    }

    # Fall back to script location for non-git repos
    return (Resolve-Path (Join-Path $PSScriptRoot "../../..")).Path
}

function Get-CurrentBranch {
    # First check if SPECIFY_FEATURE environment variable is set
    if ($env:SPECIFY_FEATURE) {
        return $env:SPECIFY_FEATURE
    }

    # Then check git if available
    try {
        $result = git rev-parse --abbrev-ref HEAD 2>$null
        if ($LASTEXITCODE -eq 0) {
            return $result
        }
    } catch {
        # Git command failed
    }

    # For non-git repos, try to find the latest feature directory
    $repoRoot = Get-RepoRoot
    $specsDir = Join-Path $repoRoot "specs"

    if (Test-Path $specsDir) {
        $latestFeature = ""
        $highest = 0

        Get-ChildItem -Path $specsDir -Directory | ForEach-Object {
            if ($_.Name -match '^(\d{3})-') {
                $num = [int]$matches[1]
                if ($num -gt $highest) {
                    $highest = $num
                    $latestFeature = $_.Name
                }
            }
        }

        if ($latestFeature) {
            return $latestFeature
        }
    }

    # Final fallback
    return "main"
}

function Test-HasGit {
    try {
        git rev-parse --show-toplevel 2>$null | Out-Null
        return ($LASTEXITCODE -eq 0)
    } catch {
        return $false
    }
}

function Test-FeatureBranch {
    param(
        [string]$Branch,
        [bool]$HasGit = $true
    )

    # For non-git repos, we can't enforce branch naming but still provide output
    if (-not $HasGit) {
        Write-Warning "[specify] Warning: Git repository not detected; skipped branch validation"
        return $true
    }

    if ($Branch -notmatch '^[0-9]{3}-') {
        Write-Output "ERROR: Not on a feature branch. Current branch: $Branch"
        Write-Output "Feature branches should be named like: 001-feature-name"
        return $false
    }
    return $true
}

function Get-FeatureDir {
    param([string]$RepoRoot, [string]$Branch)
    Join-Path $RepoRoot "specs/$Branch"
}

function Get-FeaturePathsEnv {
    $repoRoot = Get-RepoRoot
    $currentBranch = Get-CurrentBranch
    $hasGit = Test-HasGit
    $featureDir = Get-FeatureDir -RepoRoot $repoRoot -Branch $currentBranch

    [PSCustomObject]@{
        REPO_ROOT      = $repoRoot
        CURRENT_BRANCH = $currentBranch
        HAS_GIT        = $hasGit
        FEATURE_DIR    = $featureDir
        FEATURE_SPEC   = Join-Path $featureDir 'spec.md'
        IMPL_PLAN      = Join-Path $featureDir 'plan.md'
        TASKS          = Join-Path $featureDir 'tasks.md'
        RESEARCH       = Join-Path $featureDir 'research.md'
        DATA_MODEL     = Join-Path $featureDir 'data-model.md'
        QUICKSTART     = Join-Path $featureDir 'quickstart.md'
        CONTRACTS_DIR  = Join-Path $featureDir 'contracts'
    }
}
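
# Usage sketch (illustrative comment, not in the original script):
#   $paths = Get-FeaturePathsEnv
#   $paths.FEATURE_SPEC   # e.g. <repo>/specs/001-user-auth/spec.md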

function Test-FileExists {
    param([string]$Path, [string]$Description)
    if (Test-Path -Path $Path -PathType Leaf) {
        Write-Output "  ✓ $Description"
        return $true
    } else {
        Write-Output "  ✗ $Description"
        return $false
    }
}

function Test-DirHasFiles {
    param([string]$Path, [string]$Description)
    if ((Test-Path -Path $Path -PathType Container) -and (Get-ChildItem -Path $Path -ErrorAction SilentlyContinue | Where-Object { -not $_.PSIsContainer } | Select-Object -First 1)) {
        Write-Output "  ✓ $Description"
        return $true
    } else {
        Write-Output "  ✗ $Description"
        return $false
    }
}
327 .specify/scripts/powershell/create-new-feature.ps1 Normal file
@@ -0,0 +1,327 @@
#!/usr/bin/env pwsh
# Create a new feature
[CmdletBinding()]
param(
    [switch]$Json,
    [string]$ShortName,
    [int]$Number = 0,
    [switch]$Help,
    [Parameter(ValueFromRemainingArguments = $true)]
    [string[]]$FeatureDescription
)
$ErrorActionPreference = 'Stop'

# Show help if requested
if ($Help) {
    Write-Host "Usage: ./create-new-feature.ps1 [-Json] [-ShortName <name>] [-Number N] <feature description>"
    Write-Host ""
    Write-Host "Options:"
    Write-Host "  -Json               Output in JSON format"
    Write-Host "  -ShortName <name>   Provide a custom short name (2-4 words) for the branch"
    Write-Host "  -Number N           Specify branch number manually (overrides auto-detection)"
    Write-Host "  -Help               Show this help message"
    Write-Host ""
    Write-Host "Examples:"
    Write-Host "  ./create-new-feature.ps1 'Add user authentication system' -ShortName 'user-auth'"
    Write-Host "  ./create-new-feature.ps1 'Implement OAuth2 integration for API'"
    exit 0
}

# Check if feature description provided
if (-not $FeatureDescription -or $FeatureDescription.Count -eq 0) {
    Write-Error "Usage: ./create-new-feature.ps1 [-Json] [-ShortName <name>] <feature description>"
    exit 1
}

$featureDesc = ($FeatureDescription -join ' ').Trim()

# Resolve repository root. Prefer git information when available, but fall back
# to searching for repository markers so the workflow still functions in repositories that
# were initialized with --no-git.
function Find-RepositoryRoot {
    param(
        [string]$StartDir,
        [string[]]$Markers = @('.git', '.specify')
    )
    $current = Resolve-Path $StartDir
    while ($true) {
        foreach ($marker in $Markers) {
            if (Test-Path (Join-Path $current $marker)) {
                return $current
            }
        }
        $parent = Split-Path $current -Parent
        if ($parent -eq $current) {
            # Reached filesystem root without finding markers
            return $null
        }
        $current = $parent
    }
}

function Get-HighestNumberFromSpecs {
    param([string]$SpecsDir)

    $highest = 0
    if (Test-Path $SpecsDir) {
        Get-ChildItem -Path $SpecsDir -Directory | ForEach-Object {
            if ($_.Name -match '^(\d+)') {
                $num = [int]$matches[1]
                if ($num -gt $highest) { $highest = $num }
            }
        }
    }
    return $highest
}

function Get-HighestNumberFromBranches {
    param()

    $highest = 0
    try {
        $branches = git branch -a 2>$null
        if ($LASTEXITCODE -eq 0) {
            foreach ($branch in $branches) {
                # Clean branch name: remove leading markers and remote prefixes
                $cleanBranch = $branch.Trim() -replace '^\*?\s+', '' -replace '^remotes/[^/]+/', ''

                # Extract feature number if branch matches pattern ###-*
                if ($cleanBranch -match '^(\d+)-') {
                    $num = [int]$matches[1]
                    if ($num -gt $highest) { $highest = $num }
                }
            }
        }
    } catch {
        # If git command fails, return 0
        Write-Verbose "Could not check Git branches: $_"
    }
    return $highest
}

function Get-NextBranchNumber {
    param(
        [string]$ShortName,
        [string]$SpecsDir
    )

    # Fetch all remotes to get latest branch info (suppress errors if no remotes)
    try {
        git fetch --all --prune 2>$null | Out-Null
    } catch {
        # Ignore fetch errors
    }

    # Find remote branches matching the pattern using git ls-remote
    $remoteBranches = @()
    try {
        $remoteRefs = git ls-remote --heads origin 2>$null
        if ($remoteRefs) {
            $remoteBranches = $remoteRefs | Where-Object { $_ -match "refs/heads/(\d+)-$([regex]::Escape($ShortName))$" } | ForEach-Object {
                if ($_ -match "refs/heads/(\d+)-") {
                    [int]$matches[1]
                }
            }
        }
    } catch {
        # Ignore errors
    }

    # Check local branches
    $localBranches = @()
    try {
        $allBranches = git branch 2>$null
        if ($allBranches) {
            $localBranches = $allBranches | Where-Object { $_ -match "^\*?\s*(\d+)-$([regex]::Escape($ShortName))$" } | ForEach-Object {
                if ($_ -match "(\d+)-") {
                    [int]$matches[1]
                }
            }
        }
    } catch {
        # Ignore errors
    }

    # Check specs directory
    $specDirs = @()
    if (Test-Path $SpecsDir) {
        try {
            $specDirs = Get-ChildItem -Path $SpecsDir -Directory | Where-Object { $_.Name -match "^(\d+)-$([regex]::Escape($ShortName))$" } | ForEach-Object {
                if ($_.Name -match "^(\d+)-") {
                    [int]$matches[1]
                }
            }
        } catch {
            # Ignore errors
        }
    }

    # Combine all sources and get the highest number
    $maxNum = 0
    foreach ($num in ($remoteBranches + $localBranches + $specDirs)) {
        if ($num -gt $maxNum) {
            $maxNum = $num
        }
    }

    # Return next number
    return $maxNum + 1
}

function ConvertTo-CleanBranchName {
    param([string]$Name)

    return $Name.ToLower() -replace '[^a-z0-9]', '-' -replace '-{2,}', '-' -replace '^-', '' -replace '-$', ''
}
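# Usage sketch (illustrative comment, not in the original script):
#   ConvertTo-CleanBranchName -Name 'Add User Auth!!'   # -> 'add-user-auth'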
|
||||
$fallbackRoot = (Find-RepositoryRoot -StartDir $PSScriptRoot)
|
||||
if (-not $fallbackRoot) {
|
||||
Write-Error "Error: Could not determine repository root. Please run this script from within the repository."
|
||||
exit 1
|
||||
}
|
||||
|
||||
try {
|
||||
$repoRoot = git rev-parse --show-toplevel 2>$null
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
$hasGit = $true
|
||||
} else {
|
||||
throw "Git not available"
|
||||
}
|
||||
} catch {
|
||||
$repoRoot = $fallbackRoot
|
||||
$hasGit = $false
|
||||
}
|
||||
|
||||
Set-Location $repoRoot
|
||||
|
||||
$specsDir = Join-Path $repoRoot 'specs'
|
||||
New-Item -ItemType Directory -Path $specsDir -Force | Out-Null
|
||||
|
||||
# Function to generate branch name with stop word filtering and length filtering
|
||||
function Get-BranchName {
|
||||
param([string]$Description)
|
||||
|
||||
# Common stop words to filter out
|
||||
$stopWords = @(
|
||||
'i', 'a', 'an', 'the', 'to', 'for', 'of', 'in', 'on', 'at', 'by', 'with', 'from',
|
||||
'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had',
|
||||
'do', 'does', 'did', 'will', 'would', 'should', 'could', 'can', 'may', 'might', 'must', 'shall',
|
||||
'this', 'that', 'these', 'those', 'my', 'your', 'our', 'their',
|
||||
'want', 'need', 'add', 'get', 'set'
|
||||
)
|
||||
|
||||
# Convert to lowercase and extract words (alphanumeric only)
|
||||
$cleanName = $Description.ToLower() -replace '[^a-z0-9\s]', ' '
|
||||
$words = $cleanName -split '\s+' | Where-Object { $_ }
|
||||
|
||||
# Filter words: remove stop words and words shorter than 3 chars (unless they're uppercase acronyms in original)
|
||||
$meaningfulWords = @()
|
||||
foreach ($word in $words) {
|
||||
# Skip stop words
|
||||
if ($stopWords -contains $word) { continue }
|
||||
|
||||
# Keep words that are length >= 3 OR appear as uppercase in original (likely acronyms)
|
||||
if ($word.Length -ge 3) {
|
||||
$meaningfulWords += $word
|
||||
} elseif ($Description -match "\b$($word.ToUpper())\b") {
|
||||
# Keep short words if they appear as uppercase in original (likely acronyms)
|
||||
$meaningfulWords += $word
|
||||
}
|
||||
}
|
||||
|
||||
# If we have meaningful words, use first 3-4 of them
|
||||
if ($meaningfulWords.Count -gt 0) {
|
||||
$maxWords = if ($meaningfulWords.Count -eq 4) { 4 } else { 3 }
|
||||
$result = ($meaningfulWords | Select-Object -First $maxWords) -join '-'
|
||||
return $result
|
||||
} else {
|
||||
# Fallback to original logic if no meaningful words found
|
||||
$result = ConvertTo-CleanBranchName -Name $Description
|
||||
$fallbackWords = ($result -split '-') | Where-Object { $_ } | Select-Object -First 3
|
||||
return [string]::Join('-', $fallbackWords)
|
||||
}
|
||||
}
|
||||
|
||||
# Generate branch name
|
||||
if ($ShortName) {
|
||||
# Use provided short name, just clean it up
|
||||
$branchSuffix = ConvertTo-CleanBranchName -Name $ShortName
|
||||
} else {
|
||||
# Generate from description with smart filtering
|
||||
$branchSuffix = Get-BranchName -Description $featureDesc
|
||||
}
|
||||
|
||||
# Determine branch number
|
||||
if ($Number -eq 0) {
|
||||
if ($hasGit) {
|
||||
# Check existing branches on remotes
|
||||
$Number = Get-NextBranchNumber -ShortName $branchSuffix -SpecsDir $specsDir
|
||||
} else {
|
||||
# Fall back to local directory check
|
||||
$Number = (Get-HighestNumberFromSpecs -SpecsDir $specsDir) + 1
|
||||
}
|
||||
}
|
||||
|
||||
$featureNum = ('{0:000}' -f $Number)
|
||||
$branchName = "$featureNum-$branchSuffix"

# GitHub enforces a 244-byte limit on branch names
# Validate and truncate if necessary
$maxBranchLength = 244
if ($branchName.Length -gt $maxBranchLength) {
    # Calculate how much we need to trim from the suffix
    # Account for: feature number (3) + hyphen (1) = 4 chars
    $maxSuffixLength = $maxBranchLength - 4

    # Truncate the suffix
    $truncatedSuffix = $branchSuffix.Substring(0, [Math]::Min($branchSuffix.Length, $maxSuffixLength))
    # Remove a trailing hyphen if truncation created one
    $truncatedSuffix = $truncatedSuffix -replace '-$', ''

    $originalBranchName = $branchName
    $branchName = "$featureNum-$truncatedSuffix"

    Write-Warning "[specify] Branch name exceeded GitHub's 244-byte limit"
    Write-Warning "[specify] Original: $originalBranchName ($($originalBranchName.Length) bytes)"
    Write-Warning "[specify] Truncated to: $branchName ($($branchName.Length) bytes)"
}
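# Worked example (illustrative): a 250-character branch name leaves
# 244 - 4 = 240 characters for the suffix, so the suffix is cut at 240
# and any hyphen left dangling at the cut point is stripped.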

if ($hasGit) {
    try {
        git checkout -b $branchName | Out-Null
    } catch {
        Write-Warning "Failed to create git branch: $branchName"
    }
} else {
    Write-Warning "[specify] Git repository not detected; skipped branch creation for $branchName"
}

$featureDir = Join-Path $specsDir $branchName
New-Item -ItemType Directory -Path $featureDir -Force | Out-Null

$template = Join-Path $repoRoot '.specify/templates/spec-template.md'
$specFile = Join-Path $featureDir 'spec.md'
if (Test-Path $template) {
    Copy-Item $template $specFile -Force
} else {
    New-Item -ItemType File -Path $specFile | Out-Null
}

# Set the SPECIFY_FEATURE environment variable for the current session
$env:SPECIFY_FEATURE = $branchName

if ($Json) {
    $obj = [PSCustomObject]@{
        BRANCH_NAME = $branchName
        SPEC_FILE   = $specFile
        FEATURE_NUM = $featureNum
        HAS_GIT     = $hasGit
    }
    $obj | ConvertTo-Json -Compress
} else {
    Write-Output "BRANCH_NAME: $branchName"
    Write-Output "SPEC_FILE: $specFile"
    Write-Output "FEATURE_NUM: $featureNum"
    Write-Output "HAS_GIT: $hasGit"
    Write-Output "SPECIFY_FEATURE environment variable set to: $branchName"
}
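# Example -Json output (illustrative values, paths depend on the repo):
#   {"BRANCH_NAME":"007-user-authentication-app","SPEC_FILE":"/repo/specs/007-user-authentication-app/spec.md","FEATURE_NUM":"007","HAS_GIT":true}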

61
.specify/scripts/powershell/setup-plan.ps1
Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env pwsh
# Setup implementation plan for a feature

[CmdletBinding()]
param(
    [switch]$Json,
    [switch]$Help
)

$ErrorActionPreference = 'Stop'

# Show help if requested
if ($Help) {
    Write-Output "Usage: ./setup-plan.ps1 [-Json] [-Help]"
    Write-Output "  -Json   Output results in JSON format"
    Write-Output "  -Help   Show this help message"
    exit 0
}

# Load common functions
. "$PSScriptRoot/common.ps1"

# Get all paths and variables from common functions
$paths = Get-FeaturePathsEnv

# Check that we're on a proper feature branch (only for git repos)
if (-not (Test-FeatureBranch -Branch $paths.CURRENT_BRANCH -HasGit $paths.HAS_GIT)) {
    exit 1
}

# Ensure the feature directory exists
New-Item -ItemType Directory -Path $paths.FEATURE_DIR -Force | Out-Null

# Copy the plan template if it exists; otherwise warn and create an empty file
$template = Join-Path $paths.REPO_ROOT '.specify/templates/plan-template.md'
if (Test-Path $template) {
    Copy-Item $template $paths.IMPL_PLAN -Force
    Write-Output "Copied plan template to $($paths.IMPL_PLAN)"
} else {
    Write-Warning "Plan template not found at $template"
    # Create a basic plan file if the template doesn't exist
    New-Item -ItemType File -Path $paths.IMPL_PLAN -Force | Out-Null
}

# Output results
if ($Json) {
    $result = [PSCustomObject]@{
        FEATURE_SPEC = $paths.FEATURE_SPEC
        IMPL_PLAN    = $paths.IMPL_PLAN
        SPECS_DIR    = $paths.FEATURE_DIR
        BRANCH       = $paths.CURRENT_BRANCH
        HAS_GIT      = $paths.HAS_GIT
    }
    $result | ConvertTo-Json -Compress
} else {
    Write-Output "FEATURE_SPEC: $($paths.FEATURE_SPEC)"
    Write-Output "IMPL_PLAN: $($paths.IMPL_PLAN)"
    Write-Output "SPECS_DIR: $($paths.FEATURE_DIR)"
    Write-Output "BRANCH: $($paths.CURRENT_BRANCH)"
    Write-Output "HAS_GIT: $($paths.HAS_GIT)"
}
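# Example -Json output (illustrative values):
#   {"FEATURE_SPEC":"/repo/specs/007-user-auth/spec.md","IMPL_PLAN":"/repo/specs/007-user-auth/plan.md","SPECS_DIR":"/repo/specs/007-user-auth","BRANCH":"007-user-auth","HAS_GIT":true}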

445
.specify/scripts/powershell/update-agent-context.ps1
Normal file
@@ -0,0 +1,445 @@
#!/usr/bin/env pwsh
<#
.SYNOPSIS
Update agent context files with information from plan.md (PowerShell version)

.DESCRIPTION
Mirrors the behavior of scripts/bash/update-agent-context.sh:
  1. Environment validation
  2. Plan data extraction
  3. Agent file management (create from template or update existing)
  4. Content generation (technology stack, recent changes, timestamp)
  5. Multi-agent support (claude, gemini, copilot, cursor-agent, qwen, opencode, codex, windsurf, kilocode, auggie, roo, codebuddy, amp, shai, q, bob)

.PARAMETER AgentType
Optional agent key to update a single agent. If omitted, updates all existing agent files (creating a default Claude file if none exist).

.EXAMPLE
./update-agent-context.ps1 -AgentType claude

.EXAMPLE
./update-agent-context.ps1    # Updates all existing agent files

.NOTES
Relies on common helper functions in common.ps1
#>
param(
    [Parameter(Position=0)]
    [ValidateSet('claude','gemini','copilot','cursor-agent','qwen','opencode','codex','windsurf','kilocode','auggie','roo','codebuddy','amp','shai','q','bob')]
    [string]$AgentType
)

$ErrorActionPreference = 'Stop'

# Import common helpers
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
. (Join-Path $ScriptDir 'common.ps1')

# Acquire environment paths
$envData = Get-FeaturePathsEnv
$REPO_ROOT = $envData.REPO_ROOT
$CURRENT_BRANCH = $envData.CURRENT_BRANCH
$HAS_GIT = $envData.HAS_GIT
$IMPL_PLAN = $envData.IMPL_PLAN
$NEW_PLAN = $IMPL_PLAN

# Agent file paths
$CLAUDE_FILE = Join-Path $REPO_ROOT 'CLAUDE.md'
$GEMINI_FILE = Join-Path $REPO_ROOT 'GEMINI.md'
$COPILOT_FILE = Join-Path $REPO_ROOT '.github/agents/copilot-instructions.md'
$CURSOR_FILE = Join-Path $REPO_ROOT '.cursor/rules/specify-rules.mdc'
$QWEN_FILE = Join-Path $REPO_ROOT 'QWEN.md'
$AGENTS_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
$WINDSURF_FILE = Join-Path $REPO_ROOT '.windsurf/rules/specify-rules.md'
$KILOCODE_FILE = Join-Path $REPO_ROOT '.kilocode/rules/specify-rules.md'
$AUGGIE_FILE = Join-Path $REPO_ROOT '.augment/rules/specify-rules.md'
$ROO_FILE = Join-Path $REPO_ROOT '.roo/rules/specify-rules.md'
$CODEBUDDY_FILE = Join-Path $REPO_ROOT 'CODEBUDDY.md'
$AMP_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
$SHAI_FILE = Join-Path $REPO_ROOT 'SHAI.md'
$Q_FILE = Join-Path $REPO_ROOT 'AGENTS.md'
$BOB_FILE = Join-Path $REPO_ROOT 'AGENTS.md'

$TEMPLATE_FILE = Join-Path $REPO_ROOT '.specify/templates/agent-file-template.md'

# Parsed plan data placeholders
$script:NEW_LANG = ''
$script:NEW_FRAMEWORK = ''
$script:NEW_DB = ''
$script:NEW_PROJECT_TYPE = ''

function Write-Info {
    param(
        [Parameter(Mandatory=$true)]
        [string]$Message
    )
    Write-Host "INFO: $Message"
}

function Write-Success {
    param(
        [Parameter(Mandatory=$true)]
        [string]$Message
    )
    Write-Host "$([char]0x2713) $Message"
}

function Write-WarningMsg {
    param(
        [Parameter(Mandatory=$true)]
        [string]$Message
    )
    Write-Warning $Message
}

function Write-Err {
    param(
        [Parameter(Mandatory=$true)]
        [string]$Message
    )
    Write-Host "ERROR: $Message" -ForegroundColor Red
}

function Validate-Environment {
    if (-not $CURRENT_BRANCH) {
        Write-Err 'Unable to determine current feature'
        if ($HAS_GIT) { Write-Info "Make sure you're on a feature branch" } else { Write-Info 'Set the SPECIFY_FEATURE environment variable or create a feature first' }
        exit 1
    }
    if (-not (Test-Path $NEW_PLAN)) {
        Write-Err "No plan.md found at $NEW_PLAN"
        Write-Info 'Ensure you are working on a feature with a corresponding spec directory'
        if (-not $HAS_GIT) { Write-Info 'Use: $env:SPECIFY_FEATURE=your-feature-name or create a new feature first' }
        exit 1
    }
    if (-not (Test-Path $TEMPLATE_FILE)) {
        Write-Err "Template file not found at $TEMPLATE_FILE"
        Write-Info 'Run specify init to scaffold .specify/templates, or add agent-file-template.md there.'
        exit 1
    }
}

function Extract-PlanField {
    param(
        [Parameter(Mandatory=$true)]
        [string]$FieldPattern,
        [Parameter(Mandatory=$true)]
        [string]$PlanFile
    )
    if (-not (Test-Path $PlanFile)) { return '' }
    # Matches lines like: **Language/Version**: Python 3.12
    $regex = "^\*\*$([Regex]::Escape($FieldPattern))\*\*: (.+)$"
    Get-Content -LiteralPath $PlanFile -Encoding utf8 | ForEach-Object {
        if ($_ -match $regex) {
            $val = $Matches[1].Trim()
            if ($val -notin @('NEEDS CLARIFICATION','N/A')) { return $val }
        }
    } | Select-Object -First 1
}
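# Illustrative: for a plan.md line '**Language/Version**: Python 3.12',
# Extract-PlanField -FieldPattern 'Language/Version' returns 'Python 3.12';
# values of 'NEEDS CLARIFICATION' or 'N/A' are filtered out and yield nothing.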

function Parse-PlanData {
    param(
        [Parameter(Mandatory=$true)]
        [string]$PlanFile
    )
    if (-not (Test-Path $PlanFile)) { Write-Err "Plan file not found: $PlanFile"; return $false }
    Write-Info "Parsing plan data from $PlanFile"
    $script:NEW_LANG = Extract-PlanField -FieldPattern 'Language/Version' -PlanFile $PlanFile
    $script:NEW_FRAMEWORK = Extract-PlanField -FieldPattern 'Primary Dependencies' -PlanFile $PlanFile
    $script:NEW_DB = Extract-PlanField -FieldPattern 'Storage' -PlanFile $PlanFile
    $script:NEW_PROJECT_TYPE = Extract-PlanField -FieldPattern 'Project Type' -PlanFile $PlanFile

    if ($NEW_LANG) { Write-Info "Found language: $NEW_LANG" } else { Write-WarningMsg 'No language information found in plan' }
    if ($NEW_FRAMEWORK) { Write-Info "Found framework: $NEW_FRAMEWORK" }
    if ($NEW_DB -and $NEW_DB -ne 'N/A') { Write-Info "Found database: $NEW_DB" }
    if ($NEW_PROJECT_TYPE) { Write-Info "Found project type: $NEW_PROJECT_TYPE" }
    return $true
}

function Format-TechnologyStack {
    param(
        [Parameter(Mandatory=$false)]
        [string]$Lang,
        [Parameter(Mandatory=$false)]
        [string]$Framework
    )
    $parts = @()
    if ($Lang -and $Lang -ne 'NEEDS CLARIFICATION') { $parts += $Lang }
    if ($Framework -and $Framework -notin @('NEEDS CLARIFICATION','N/A')) { $parts += $Framework }
    if (-not $parts) { return '' }
    return ($parts -join ' + ')
}
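# Illustrative: Format-TechnologyStack -Lang 'Python 3.11' -Framework 'FastAPI'
# returns 'Python 3.11 + FastAPI'; with only a language it returns just 'Python 3.11'.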

function Get-ProjectStructure {
    param(
        [Parameter(Mandatory=$false)]
        [string]$ProjectType
    )
    if ($ProjectType -match 'web') { return "backend/`nfrontend/`ntests/" } else { return "src/`ntests/" }
}
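# Illustrative: a Project Type of 'web application' matches 'web' and yields the
# backend/frontend/tests layout; anything else falls back to src/tests.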

function Get-CommandsForLanguage {
    param(
        [Parameter(Mandatory=$false)]
        [string]$Lang
    )
    switch -Regex ($Lang) {
        'Python'                { return "cd src; pytest; ruff check ." }
        'Rust'                  { return "cargo test; cargo clippy" }
        'JavaScript|TypeScript' { return "npm test; npm run lint" }
        default                 { return "# Add commands for $Lang" }
    }
}
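# Illustrative: a plan language of 'TypeScript 5.x' matches the
# 'JavaScript|TypeScript' branch and returns "npm test; npm run lint".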

function Get-LanguageConventions {
    param(
        [Parameter(Mandatory=$false)]
        [string]$Lang
    )
    if ($Lang) { "${Lang}: Follow standard conventions" } else { 'General: Follow standard conventions' }
}

function New-AgentFile {
    param(
        [Parameter(Mandatory=$true)]
        [string]$TargetFile,
        [Parameter(Mandatory=$true)]
        [string]$ProjectName,
        [Parameter(Mandatory=$true)]
        [datetime]$Date
    )
    if (-not (Test-Path $TEMPLATE_FILE)) { Write-Err "Template not found at $TEMPLATE_FILE"; return $false }
    $temp = New-TemporaryFile
    Copy-Item -LiteralPath $TEMPLATE_FILE -Destination $temp -Force

    $projectStructure = Get-ProjectStructure -ProjectType $NEW_PROJECT_TYPE
    $commands = Get-CommandsForLanguage -Lang $NEW_LANG
    $languageConventions = Get-LanguageConventions -Lang $NEW_LANG

    $escaped_lang = $NEW_LANG
    $escaped_framework = $NEW_FRAMEWORK
    $escaped_branch = $CURRENT_BRANCH

    $content = Get-Content -LiteralPath $temp -Raw -Encoding utf8
    $content = $content -replace '\[PROJECT NAME\]',$ProjectName
    $content = $content -replace '\[DATE\]',$Date.ToString('yyyy-MM-dd')

    # Build the technology stack string safely
    $techStackForTemplate = ""
    if ($escaped_lang -and $escaped_framework) {
        $techStackForTemplate = "- $escaped_lang + $escaped_framework ($escaped_branch)"
    } elseif ($escaped_lang) {
        $techStackForTemplate = "- $escaped_lang ($escaped_branch)"
    } elseif ($escaped_framework) {
        $techStackForTemplate = "- $escaped_framework ($escaped_branch)"
    }

    $content = $content -replace '\[EXTRACTED FROM ALL PLAN.MD FILES\]',$techStackForTemplate
    # Embed the project structure manually (keep newlines): [Regex]::Escape turns
    # the `n newlines into literal \n sequences, which are converted back to real
    # newlines after all replacements below
    $escapedStructure = [Regex]::Escape($projectStructure)
    $content = $content -replace '\[ACTUAL STRUCTURE FROM PLANS\]',$escapedStructure
    $content = $content -replace '\[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES\]',$commands
    $content = $content -replace '\[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE\]',$languageConventions

    # Build the recent changes string safely
    $recentChangesForTemplate = ""
    if ($escaped_lang -and $escaped_framework) {
        $recentChangesForTemplate = "- ${escaped_branch}: Added ${escaped_lang} + ${escaped_framework}"
    } elseif ($escaped_lang) {
        $recentChangesForTemplate = "- ${escaped_branch}: Added ${escaped_lang}"
    } elseif ($escaped_framework) {
        $recentChangesForTemplate = "- ${escaped_branch}: Added ${escaped_framework}"
    }

    $content = $content -replace '\[LAST 3 FEATURES AND WHAT THEY ADDED\]',$recentChangesForTemplate
    # Convert literal \n sequences introduced by Escape to real newlines
    $content = $content -replace '\\n',[Environment]::NewLine

    $parent = Split-Path -Parent $TargetFile
    if (-not (Test-Path $parent)) { New-Item -ItemType Directory -Path $parent | Out-Null }
    Set-Content -LiteralPath $TargetFile -Value $content -NoNewline -Encoding utf8
    Remove-Item $temp -Force
    return $true
}

function Update-ExistingAgentFile {
    param(
        [Parameter(Mandatory=$true)]
        [string]$TargetFile,
        [Parameter(Mandatory=$true)]
        [datetime]$Date
    )
    if (-not (Test-Path $TargetFile)) { return (New-AgentFile -TargetFile $TargetFile -ProjectName (Split-Path $REPO_ROOT -Leaf) -Date $Date) }

    $techStack = Format-TechnologyStack -Lang $NEW_LANG -Framework $NEW_FRAMEWORK
    $newTechEntries = @()
    if ($techStack) {
        $escapedTechStack = [Regex]::Escape($techStack)
        if (-not (Select-String -Pattern $escapedTechStack -Path $TargetFile -Quiet)) {
            $newTechEntries += "- $techStack ($CURRENT_BRANCH)"
        }
    }
    if ($NEW_DB -and $NEW_DB -notin @('N/A','NEEDS CLARIFICATION')) {
        $escapedDB = [Regex]::Escape($NEW_DB)
        if (-not (Select-String -Pattern $escapedDB -Path $TargetFile -Quiet)) {
            $newTechEntries += "- $NEW_DB ($CURRENT_BRANCH)"
        }
    }
    $newChangeEntry = ''
    if ($techStack) { $newChangeEntry = "- ${CURRENT_BRANCH}: Added ${techStack}" }
    elseif ($NEW_DB -and $NEW_DB -notin @('N/A','NEEDS CLARIFICATION')) { $newChangeEntry = "- ${CURRENT_BRANCH}: Added ${NEW_DB}" }

    $lines = Get-Content -LiteralPath $TargetFile -Encoding utf8
    $output = New-Object System.Collections.Generic.List[string]
    $inTech = $false; $inChanges = $false; $techAdded = $false; $changeAdded = $false; $existingChanges = 0

    for ($i=0; $i -lt $lines.Count; $i++) {
        $line = $lines[$i]
        if ($line -eq '## Active Technologies') {
            $output.Add($line)
            $inTech = $true
            continue
        }
        if ($inTech -and $line -match '^##\s') {
            if (-not $techAdded -and $newTechEntries.Count -gt 0) { $newTechEntries | ForEach-Object { $output.Add($_) }; $techAdded = $true }
            $output.Add($line); $inTech = $false; continue
        }
        if ($inTech -and [string]::IsNullOrWhiteSpace($line)) {
            if (-not $techAdded -and $newTechEntries.Count -gt 0) { $newTechEntries | ForEach-Object { $output.Add($_) }; $techAdded = $true }
            $output.Add($line); continue
        }
        if ($line -eq '## Recent Changes') {
            $output.Add($line)
            if ($newChangeEntry) { $output.Add($newChangeEntry); $changeAdded = $true }
            $inChanges = $true
            continue
        }
        if ($inChanges -and $line -match '^##\s') { $output.Add($line); $inChanges = $false; continue }
        if ($inChanges -and $line -match '^- ') {
            # Keep only the two most recent existing entries (three total with the new one)
            if ($existingChanges -lt 2) { $output.Add($line); $existingChanges++ }
            continue
        }
        if ($line -match '\*\*Last updated\*\*: .*\d{4}-\d{2}-\d{2}') {
            $output.Add(($line -replace '\d{4}-\d{2}-\d{2}',$Date.ToString('yyyy-MM-dd')))
            continue
        }
        $output.Add($line)
    }

    # Post-loop check: if we're still in the Active Technologies section and haven't added the new entries
    if ($inTech -and -not $techAdded -and $newTechEntries.Count -gt 0) {
        $newTechEntries | ForEach-Object { $output.Add($_) }
    }

    Set-Content -LiteralPath $TargetFile -Value ($output -join [Environment]::NewLine) -Encoding utf8
    return $true
}
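# Illustrative: if the agent file's '## Recent Changes' section already lists
# three branches, the new entry is inserted first and only the two newest
# existing bullets are kept, so the section always holds at most three entries.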

function Update-AgentFile {
    param(
        [Parameter(Mandatory=$true)]
        [string]$TargetFile,
        [Parameter(Mandatory=$true)]
        [string]$AgentName
    )
    if (-not $TargetFile -or -not $AgentName) { Write-Err 'Update-AgentFile requires TargetFile and AgentName'; return $false }
    Write-Info "Updating $AgentName context file: $TargetFile"
    $projectName = Split-Path $REPO_ROOT -Leaf
    $date = Get-Date

    $dir = Split-Path -Parent $TargetFile
    if (-not (Test-Path $dir)) { New-Item -ItemType Directory -Path $dir | Out-Null }

    if (-not (Test-Path $TargetFile)) {
        if (New-AgentFile -TargetFile $TargetFile -ProjectName $projectName -Date $date) { Write-Success "Created new $AgentName context file" } else { Write-Err 'Failed to create new agent file'; return $false }
    } else {
        try {
            if (Update-ExistingAgentFile -TargetFile $TargetFile -Date $date) { Write-Success "Updated existing $AgentName context file" } else { Write-Err 'Failed to update agent file'; return $false }
        } catch {
            Write-Err "Cannot access or update existing file: $TargetFile. $_"
            return $false
        }
    }
    return $true
}

function Update-SpecificAgent {
    param(
        [Parameter(Mandatory=$true)]
        [string]$Type
    )
    switch ($Type) {
        'claude'       { Update-AgentFile -TargetFile $CLAUDE_FILE -AgentName 'Claude Code' }
        'gemini'       { Update-AgentFile -TargetFile $GEMINI_FILE -AgentName 'Gemini CLI' }
        'copilot'      { Update-AgentFile -TargetFile $COPILOT_FILE -AgentName 'GitHub Copilot' }
        'cursor-agent' { Update-AgentFile -TargetFile $CURSOR_FILE -AgentName 'Cursor IDE' }
        'qwen'         { Update-AgentFile -TargetFile $QWEN_FILE -AgentName 'Qwen Code' }
        'opencode'     { Update-AgentFile -TargetFile $AGENTS_FILE -AgentName 'opencode' }
        'codex'        { Update-AgentFile -TargetFile $AGENTS_FILE -AgentName 'Codex CLI' }
        'windsurf'     { Update-AgentFile -TargetFile $WINDSURF_FILE -AgentName 'Windsurf' }
        'kilocode'     { Update-AgentFile -TargetFile $KILOCODE_FILE -AgentName 'Kilo Code' }
        'auggie'       { Update-AgentFile -TargetFile $AUGGIE_FILE -AgentName 'Auggie CLI' }
        'roo'          { Update-AgentFile -TargetFile $ROO_FILE -AgentName 'Roo Code' }
        'codebuddy'    { Update-AgentFile -TargetFile $CODEBUDDY_FILE -AgentName 'CodeBuddy CLI' }
        'amp'          { Update-AgentFile -TargetFile $AMP_FILE -AgentName 'Amp' }
        'shai'         { Update-AgentFile -TargetFile $SHAI_FILE -AgentName 'SHAI' }
        'q'            { Update-AgentFile -TargetFile $Q_FILE -AgentName 'Amazon Q Developer CLI' }
        'bob'          { Update-AgentFile -TargetFile $BOB_FILE -AgentName 'IBM Bob' }
        default { Write-Err "Unknown agent type '$Type'"; Write-Err 'Expected: claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|roo|codebuddy|amp|shai|q|bob'; return $false }
    }
}

function Update-AllExistingAgents {
    $found = $false
    $ok = $true
    if (Test-Path $CLAUDE_FILE) { if (-not (Update-AgentFile -TargetFile $CLAUDE_FILE -AgentName 'Claude Code')) { $ok = $false }; $found = $true }
    if (Test-Path $GEMINI_FILE) { if (-not (Update-AgentFile -TargetFile $GEMINI_FILE -AgentName 'Gemini CLI')) { $ok = $false }; $found = $true }
    if (Test-Path $COPILOT_FILE) { if (-not (Update-AgentFile -TargetFile $COPILOT_FILE -AgentName 'GitHub Copilot')) { $ok = $false }; $found = $true }
    if (Test-Path $CURSOR_FILE) { if (-not (Update-AgentFile -TargetFile $CURSOR_FILE -AgentName 'Cursor IDE')) { $ok = $false }; $found = $true }
    if (Test-Path $QWEN_FILE) { if (-not (Update-AgentFile -TargetFile $QWEN_FILE -AgentName 'Qwen Code')) { $ok = $false }; $found = $true }
    if (Test-Path $AGENTS_FILE) { if (-not (Update-AgentFile -TargetFile $AGENTS_FILE -AgentName 'Codex/opencode')) { $ok = $false }; $found = $true }
    if (Test-Path $WINDSURF_FILE) { if (-not (Update-AgentFile -TargetFile $WINDSURF_FILE -AgentName 'Windsurf')) { $ok = $false }; $found = $true }
    if (Test-Path $KILOCODE_FILE) { if (-not (Update-AgentFile -TargetFile $KILOCODE_FILE -AgentName 'Kilo Code')) { $ok = $false }; $found = $true }
    if (Test-Path $AUGGIE_FILE) { if (-not (Update-AgentFile -TargetFile $AUGGIE_FILE -AgentName 'Auggie CLI')) { $ok = $false }; $found = $true }
    if (Test-Path $ROO_FILE) { if (-not (Update-AgentFile -TargetFile $ROO_FILE -AgentName 'Roo Code')) { $ok = $false }; $found = $true }
    if (Test-Path $CODEBUDDY_FILE) { if (-not (Update-AgentFile -TargetFile $CODEBUDDY_FILE -AgentName 'CodeBuddy CLI')) { $ok = $false }; $found = $true }
    if (Test-Path $SHAI_FILE) { if (-not (Update-AgentFile -TargetFile $SHAI_FILE -AgentName 'SHAI')) { $ok = $false }; $found = $true }
    if (Test-Path $Q_FILE) { if (-not (Update-AgentFile -TargetFile $Q_FILE -AgentName 'Amazon Q Developer CLI')) { $ok = $false }; $found = $true }
    if (Test-Path $BOB_FILE) { if (-not (Update-AgentFile -TargetFile $BOB_FILE -AgentName 'IBM Bob')) { $ok = $false }; $found = $true }
    if (-not $found) {
        Write-Info 'No existing agent files found, creating default Claude file...'
        if (-not (Update-AgentFile -TargetFile $CLAUDE_FILE -AgentName 'Claude Code')) { $ok = $false }
    }
    return $ok
}

function Print-Summary {
    Write-Host ''
    Write-Info 'Summary of changes:'
    if ($NEW_LANG) { Write-Host "  - Added language: $NEW_LANG" }
    if ($NEW_FRAMEWORK) { Write-Host "  - Added framework: $NEW_FRAMEWORK" }
    if ($NEW_DB -and $NEW_DB -ne 'N/A') { Write-Host "  - Added database: $NEW_DB" }
    Write-Host ''
    Write-Info 'Usage: ./update-agent-context.ps1 [-AgentType claude|gemini|copilot|cursor-agent|qwen|opencode|codex|windsurf|kilocode|auggie|roo|codebuddy|amp|shai|q|bob]'
}

function Main {
    Validate-Environment
    Write-Info "=== Updating agent context files for feature $CURRENT_BRANCH ==="
    if (-not (Parse-PlanData -PlanFile $NEW_PLAN)) { Write-Err 'Failed to parse plan data'; exit 1 }
    $success = $true
    if ($AgentType) {
        Write-Info "Updating specific agent: $AgentType"
        if (-not (Update-SpecificAgent -Type $AgentType)) { $success = $false }
    }
    else {
        Write-Info 'No agent specified, updating all existing agent files...'
        if (-not (Update-AllExistingAgents)) { $success = $false }
    }
    Print-Summary
    if ($success) { Write-Success 'Agent context update completed successfully'; exit 0 } else { Write-Err 'Agent context update completed with errors'; exit 1 }
}

Main

28
.specify/templates/agent-file-template.md
Normal file
@@ -0,0 +1,28 @@
# [PROJECT NAME] Development Guidelines

Auto-generated from all feature plans. Last updated: [DATE]

## Active Technologies

[EXTRACTED FROM ALL PLAN.MD FILES]

## Project Structure

```text
[ACTUAL STRUCTURE FROM PLANS]
```

## Commands

[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES]

## Code Style

[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE]

## Recent Changes

[LAST 3 FEATURES AND WHAT THEY ADDED]

<!-- MANUAL ADDITIONS START -->
<!-- MANUAL ADDITIONS END -->
40
.specify/templates/checklist-template.md
Normal file
@@ -0,0 +1,40 @@
# [CHECKLIST TYPE] Checklist: [FEATURE NAME]

**Purpose**: [Brief description of what this checklist covers]
**Created**: [DATE]
**Feature**: [Link to spec.md or relevant documentation]

**Note**: This checklist is generated by the `/speckit.checklist` command based on feature context and requirements.

<!--
============================================================================
IMPORTANT: The checklist items below are SAMPLE ITEMS for illustration only.

The /speckit.checklist command MUST replace these with actual items based on:
- User's specific checklist request
- Feature requirements from spec.md
- Technical context from plan.md
- Implementation details from tasks.md

DO NOT keep these sample items in the generated checklist file.
============================================================================
-->

## [Category 1]

- [ ] CHK001 First checklist item with clear action
- [ ] CHK002 Second checklist item
- [ ] CHK003 Third checklist item

## [Category 2]

- [ ] CHK004 Another category item
- [ ] CHK005 Item with specific criteria
- [ ] CHK006 Final item in this category

## Notes

- Check items off as completed: `[x]`
- Add comments or findings inline
- Link to relevant resources or documentation
- Items are numbered sequentially for easy reference
104
.specify/templates/plan-template.md
Normal file
@@ -0,0 +1,104 @@
# Implementation Plan: [FEATURE]

**Branch**: `[###-feature-name]` | **Date**: [DATE] | **Spec**: [link]
**Input**: Feature specification from `/specs/[###-feature-name]/spec.md`

**Note**: This template is filled in by the `/speckit.plan` command. See `.specify/templates/commands/plan.md` for the execution workflow.

## Summary

[Extract from feature spec: primary requirement + technical approach from research]

## Technical Context

<!--
ACTION REQUIRED: Replace the content in this section with the technical details
for the project. The structure here is presented in an advisory capacity to guide
the iteration process.
-->

**Language/Version**: [e.g., Python 3.11, Swift 5.9, Rust 1.75 or NEEDS CLARIFICATION]
**Primary Dependencies**: [e.g., FastAPI, UIKit, LLVM or NEEDS CLARIFICATION]
**Storage**: [if applicable, e.g., PostgreSQL, CoreData, files or N/A]
**Testing**: [e.g., pytest, XCTest, cargo test or NEEDS CLARIFICATION]
**Target Platform**: [e.g., Linux server, iOS 15+, WASM or NEEDS CLARIFICATION]
**Project Type**: [single/web/mobile - determines source structure]
**Performance Goals**: [domain-specific, e.g., 1000 req/s, 10k lines/sec, 60 fps or NEEDS CLARIFICATION]
**Constraints**: [domain-specific, e.g., <200ms p95, <100MB memory, offline-capable or NEEDS CLARIFICATION]
**Scale/Scope**: [domain-specific, e.g., 10k users, 1M LOC, 50 screens or NEEDS CLARIFICATION]

## Constitution Check

*GATE: Must pass before Phase 0 research. Re-check after Phase 1 design.*

[Gates determined based on constitution file]

## Project Structure

### Documentation (this feature)

```text
specs/[###-feature]/
├── plan.md              # This file (/speckit.plan command output)
├── research.md          # Phase 0 output (/speckit.plan command)
├── data-model.md        # Phase 1 output (/speckit.plan command)
├── quickstart.md        # Phase 1 output (/speckit.plan command)
├── contracts/           # Phase 1 output (/speckit.plan command)
└── tasks.md             # Phase 2 output (/speckit.tasks command - NOT created by /speckit.plan)
```

### Source Code (repository root)

<!--
ACTION REQUIRED: Replace the placeholder tree below with the concrete layout
for this feature. Delete unused options and expand the chosen structure with
real paths (e.g., apps/admin, packages/something). The delivered plan must
not include Option labels.
-->

```text
# [REMOVE IF UNUSED] Option 1: Single project (DEFAULT)
src/
├── models/
├── services/
├── cli/
└── lib/

tests/
├── contract/
├── integration/
└── unit/

# [REMOVE IF UNUSED] Option 2: Web application (when "frontend" + "backend" detected)
backend/
├── src/
│   ├── models/
│   ├── services/
│   └── api/
└── tests/

frontend/
├── src/
│   ├── components/
│   ├── pages/
│   └── services/
└── tests/

# [REMOVE IF UNUSED] Option 3: Mobile + API (when "iOS/Android" detected)
api/
└── [same as backend above]

ios/ or android/
└── [platform-specific structure: feature modules, UI flows, platform tests]
```

**Structure Decision**: [Document the selected structure and reference the real
directories captured above]

## Complexity Tracking

> **Fill ONLY if Constitution Check has violations that must be justified**

| Violation | Why Needed | Simpler Alternative Rejected Because |
|-----------|------------|-------------------------------------|
| [e.g., 4th project] | [current need] | [why 3 projects insufficient] |
| [e.g., Repository pattern] | [specific problem] | [why direct DB access insufficient] |
115
.specify/templates/spec-template.md
Normal file
@@ -0,0 +1,115 @@
# Feature Specification: [FEATURE NAME]

**Feature Branch**: `[###-feature-name]`
**Created**: [DATE]
**Status**: Draft
**Input**: User description: "$ARGUMENTS"

## User Scenarios & Testing *(mandatory)*

<!--
IMPORTANT: User stories should be PRIORITIZED as user journeys ordered by importance.
Each user story/journey must be INDEPENDENTLY TESTABLE - meaning if you implement just ONE of them,
you should still have a viable MVP (Minimum Viable Product) that delivers value.

Assign priorities (P1, P2, P3, etc.) to each story, where P1 is the most critical.
Think of each story as a standalone slice of functionality that can be:
- Developed independently
- Tested independently
- Deployed independently
- Demonstrated to users independently
-->

### User Story 1 - [Brief Title] (Priority: P1)

[Describe this user journey in plain language]

**Why this priority**: [Explain the value and why it has this priority level]

**Independent Test**: [Describe how this can be tested independently - e.g., "Can be fully tested by [specific action] and delivers [specific value]"]

**Acceptance Scenarios**:

1. **Given** [initial state], **When** [action], **Then** [expected outcome]
2. **Given** [initial state], **When** [action], **Then** [expected outcome]

---

### User Story 2 - [Brief Title] (Priority: P2)

[Describe this user journey in plain language]

**Why this priority**: [Explain the value and why it has this priority level]

**Independent Test**: [Describe how this can be tested independently]

**Acceptance Scenarios**:

1. **Given** [initial state], **When** [action], **Then** [expected outcome]

---

### User Story 3 - [Brief Title] (Priority: P3)

[Describe this user journey in plain language]

**Why this priority**: [Explain the value and why it has this priority level]

**Independent Test**: [Describe how this can be tested independently]

**Acceptance Scenarios**:

1. **Given** [initial state], **When** [action], **Then** [expected outcome]

---

[Add more user stories as needed, each with an assigned priority]

### Edge Cases

<!--
ACTION REQUIRED: The content in this section represents placeholders.
Fill them out with the right edge cases.
-->

- What happens when [boundary condition]?
- How does the system handle [error scenario]?

## Requirements *(mandatory)*

<!--
ACTION REQUIRED: The content in this section represents placeholders.
Fill them out with the right functional requirements.
-->

### Functional Requirements

- **FR-001**: System MUST [specific capability, e.g., "allow users to create accounts"]
- **FR-002**: System MUST [specific capability, e.g., "validate email addresses"]
- **FR-003**: Users MUST be able to [key interaction, e.g., "reset their password"]
- **FR-004**: System MUST [data requirement, e.g., "persist user preferences"]
- **FR-005**: System MUST [behavior, e.g., "log all security events"]

*Example of marking unclear requirements:*

- **FR-006**: System MUST authenticate users via [NEEDS CLARIFICATION: auth method not specified - email/password, SSO, OAuth?]
- **FR-007**: System MUST retain user data for [NEEDS CLARIFICATION: retention period not specified]

### Key Entities *(include if feature involves data)*

- **[Entity 1]**: [What it represents, key attributes without implementation]
- **[Entity 2]**: [What it represents, relationships to other entities]

## Success Criteria *(mandatory)*

<!--
ACTION REQUIRED: Define measurable success criteria.
These must be technology-agnostic and measurable.
-->

### Measurable Outcomes

- **SC-001**: [Measurable metric, e.g., "Users can complete account creation in under 2 minutes"]
- **SC-002**: [Measurable metric, e.g., "System handles 1000 concurrent users without degradation"]
- **SC-003**: [User satisfaction metric, e.g., "90% of users successfully complete primary task on first attempt"]
- **SC-004**: [Business metric, e.g., "Reduce support tickets related to [X] by 50%"]
251
.specify/templates/tasks-template.md
Normal file
@@ -0,0 +1,251 @@
---

description: "Task list template for feature implementation"
---

# Tasks: [FEATURE NAME]

**Input**: Design documents from `/specs/[###-feature-name]/`
**Prerequisites**: plan.md (required), spec.md (required for user stories), research.md, data-model.md, contracts/

**Tests**: The examples below include test tasks. Tests are OPTIONAL - only include them if explicitly requested in the feature specification.

**Organization**: Tasks are grouped by user story to enable independent implementation and testing of each story.

## Format: `[ID] [P?] [Story] Description`

- **[P]**: Can run in parallel (different files, no dependencies)
- **[Story]**: Which user story this task belongs to (e.g., US1, US2, US3)
- Include exact file paths in descriptions

## Path Conventions

- **Single project**: `src/`, `tests/` at repository root
- **Web app**: `backend/src/`, `frontend/src/`
- **Mobile**: `api/src/`, `ios/src/` or `android/src/`
- Paths shown below assume a single project - adjust based on plan.md structure

<!--
============================================================================
IMPORTANT: The tasks below are SAMPLE TASKS for illustration purposes only.

The /speckit.tasks command MUST replace these with actual tasks based on:
- User stories from spec.md (with their priorities P1, P2, P3...)
- Feature requirements from plan.md
- Entities from data-model.md
- Endpoints from contracts/

Tasks MUST be organized by user story so each story can be:
- Implemented independently
- Tested independently
- Delivered as an MVP increment

DO NOT keep these sample tasks in the generated tasks.md file.
============================================================================
-->

## Phase 1: Setup (Shared Infrastructure)

**Purpose**: Project initialization and basic structure

- [ ] T001 Create project structure per implementation plan
- [ ] T002 Initialize [language] project with [framework] dependencies
- [ ] T003 [P] Configure linting and formatting tools

---

## Phase 2: Foundational (Blocking Prerequisites)

**Purpose**: Core infrastructure that MUST be complete before ANY user story can be implemented

**⚠️ CRITICAL**: No user story work can begin until this phase is complete

Examples of foundational tasks (adjust based on your project):

- [ ] T004 Setup database schema and migrations framework
- [ ] T005 [P] Implement authentication/authorization framework
- [ ] T006 [P] Setup API routing and middleware structure
- [ ] T007 Create base models/entities that all stories depend on
- [ ] T008 Configure error handling and logging infrastructure
- [ ] T009 Setup environment configuration management

**Checkpoint**: Foundation ready - user story implementation can now begin in parallel

---

## Phase 3: User Story 1 - [Title] (Priority: P1) 🎯 MVP

**Goal**: [Brief description of what this story delivers]

**Independent Test**: [How to verify this story works on its own]

### Tests for User Story 1 (OPTIONAL - only if tests requested) ⚠️

> **NOTE: Write these tests FIRST, ensure they FAIL before implementation**

- [ ] T010 [P] [US1] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T011 [P] [US1] Integration test for [user journey] in tests/integration/test_[name].py

### Implementation for User Story 1

- [ ] T012 [P] [US1] Create [Entity1] model in src/models/[entity1].py
- [ ] T013 [P] [US1] Create [Entity2] model in src/models/[entity2].py
- [ ] T014 [US1] Implement [Service] in src/services/[service].py (depends on T012, T013)
- [ ] T015 [US1] Implement [endpoint/feature] in src/[location]/[file].py
- [ ] T016 [US1] Add validation and error handling
- [ ] T017 [US1] Add logging for user story 1 operations

**Checkpoint**: At this point, User Story 1 should be fully functional and testable independently

---

## Phase 4: User Story 2 - [Title] (Priority: P2)

**Goal**: [Brief description of what this story delivers]

**Independent Test**: [How to verify this story works on its own]

### Tests for User Story 2 (OPTIONAL - only if tests requested) ⚠️

- [ ] T018 [P] [US2] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T019 [P] [US2] Integration test for [user journey] in tests/integration/test_[name].py

### Implementation for User Story 2

- [ ] T020 [P] [US2] Create [Entity] model in src/models/[entity].py
- [ ] T021 [US2] Implement [Service] in src/services/[service].py
- [ ] T022 [US2] Implement [endpoint/feature] in src/[location]/[file].py
- [ ] T023 [US2] Integrate with User Story 1 components (if needed)

**Checkpoint**: At this point, User Stories 1 AND 2 should both work independently

---

## Phase 5: User Story 3 - [Title] (Priority: P3)

**Goal**: [Brief description of what this story delivers]

**Independent Test**: [How to verify this story works on its own]

### Tests for User Story 3 (OPTIONAL - only if tests requested) ⚠️

- [ ] T024 [P] [US3] Contract test for [endpoint] in tests/contract/test_[name].py
- [ ] T025 [P] [US3] Integration test for [user journey] in tests/integration/test_[name].py

### Implementation for User Story 3

- [ ] T026 [P] [US3] Create [Entity] model in src/models/[entity].py
- [ ] T027 [US3] Implement [Service] in src/services/[service].py
- [ ] T028 [US3] Implement [endpoint/feature] in src/[location]/[file].py

**Checkpoint**: All user stories should now be independently functional

---

[Add more user story phases as needed, following the same pattern]

---

## Phase N: Polish & Cross-Cutting Concerns

**Purpose**: Improvements that affect multiple user stories

- [ ] TXXX [P] Documentation updates in docs/
- [ ] TXXX Code cleanup and refactoring
- [ ] TXXX Performance optimization across all stories
- [ ] TXXX [P] Additional unit tests (if requested) in tests/unit/
- [ ] TXXX Security hardening
- [ ] TXXX Run quickstart.md validation

---

## Dependencies & Execution Order

### Phase Dependencies

- **Setup (Phase 1)**: No dependencies - can start immediately
- **Foundational (Phase 2)**: Depends on Setup completion - BLOCKS all user stories
- **User Stories (Phase 3+)**: All depend on Foundational phase completion
  - User stories can then proceed in parallel (if staffed)
  - Or sequentially in priority order (P1 → P2 → P3)
- **Polish (Final Phase)**: Depends on all desired user stories being complete

### User Story Dependencies

- **User Story 1 (P1)**: Can start after Foundational (Phase 2) - No dependencies on other stories
- **User Story 2 (P2)**: Can start after Foundational (Phase 2) - May integrate with US1 but should be independently testable
- **User Story 3 (P3)**: Can start after Foundational (Phase 2) - May integrate with US1/US2 but should be independently testable

### Within Each User Story

- Tests (if included) MUST be written and FAIL before implementation
- Models before services
- Services before endpoints
- Core implementation before integration
- Story complete before moving to the next priority

### Parallel Opportunities

- All Setup tasks marked [P] can run in parallel
- All Foundational tasks marked [P] can run in parallel (within Phase 2)
- Once the Foundational phase completes, all user stories can start in parallel (if team capacity allows)
- All tests for a user story marked [P] can run in parallel
- Models within a story marked [P] can run in parallel
- Different user stories can be worked on in parallel by different team members

---

## Parallel Example: User Story 1

```bash
# Launch all tests for User Story 1 together (if tests requested):
Task: "Contract test for [endpoint] in tests/contract/test_[name].py"
Task: "Integration test for [user journey] in tests/integration/test_[name].py"

# Launch all models for User Story 1 together:
Task: "Create [Entity1] model in src/models/[entity1].py"
Task: "Create [Entity2] model in src/models/[entity2].py"
```

---

## Implementation Strategy

### MVP First (User Story 1 Only)

1. Complete Phase 1: Setup
2. Complete Phase 2: Foundational (CRITICAL - blocks all stories)
3. Complete Phase 3: User Story 1
4. **STOP and VALIDATE**: Test User Story 1 independently
5. Deploy/demo if ready

### Incremental Delivery

1. Complete Setup + Foundational → Foundation ready
2. Add User Story 1 → Test independently → Deploy/Demo (MVP!)
3. Add User Story 2 → Test independently → Deploy/Demo
4. Add User Story 3 → Test independently → Deploy/Demo
5. Each story adds value without breaking previous stories

### Parallel Team Strategy

With multiple developers:

1. Team completes Setup + Foundational together
2. Once Foundational is done:
   - Developer A: User Story 1
   - Developer B: User Story 2
   - Developer C: User Story 3
3. Stories complete and integrate independently

---

## Notes

- [P] tasks = different files, no dependencies
- [Story] label maps a task to a specific user story for traceability
- Each user story should be independently completable and testable
- Verify tests fail before implementing
- Commit after each task or logical group
- Stop at any checkpoint to validate a story independently
- Avoid: vague tasks, same-file conflicts, cross-story dependencies that break independence
1
.spectral.yml
Normal file
@@ -0,0 +1 @@
# OpenAPI linting rules
98
.vscode/extensions.json
vendored
@@ -1,66 +1,40 @@
{ "recommendations": [
    "aaron-bond.better-comments",
    "anbuselvanrocky.bootstrap5-vscode",
    "bmewburn.vscode-intelephense-client",
    "bradlc.vscode-tailwindcss",
    "christian-kohler.path-intellisense",
    "codezombiech.gitignore",
    "davidanson.vscode-markdownlint",
    "dbaeumer.vscode-eslint",
    "dsznajder.es7-react-js-snippets",
    "dunstontc.vscode-docker-syntax",
    "eamodio.gitlens",
    "easycodeai.chatgpt-gpt4-gpt3-vscode",
    "ecmel.vscode-html-css",
    "editorconfig.editorconfig",
    "esbenp.prettier-vscode",
    "firsttris.vscode-jest-runner",
    "formulahendry.auto-rename-tag",
    "github.copilot",
    "github.copilot-chat",
    "google.geminicodeassist",
    "hansuxdev.bootstrap5-snippets",
    "heybourn.headwind",
    "humao.rest-client",
    "imgildev.vscode-auto-barrel",
    "imgildev.vscode-json-flow",
    "imgildev.vscode-nestjs-generator",
    "imgildev.vscode-nestjs-pack",
    "imgildev.vscode-nestjs-snippets-extension",
    "imgildev.vscode-nestjs-swagger-snippets",
    "inferrinizzard.prettier-sql-vscode",
    "jmkrivocapich.drawfolderstructure",
    "mhutchie.git-graph",
    "mikestead.dotenv",
    "ms-azuretools.vscode-containers",
    "ms-azuretools.vscode-docker",
    "ms-edgedevtools.vscode-edge-devtools",
    "ms-python.debugpy",
    "ms-python.python",
    "ms-vscode-remote.remote-containers",
    "ms-vscode-remote.remote-ssh",
    "ms-vscode-remote.remote-ssh-edit",
{
    "recommendations": [
        "ms-vscode.powershell",
        "ms-vscode.remote-explorer",
        "mtxr.sqltools",
        "mtxr.sqltools-driver-mysql",
        "oderwat.indent-rainbow",
        "orta.vscode-jest",
        "pdconsec.vscode-print",
        "pmneo.tsimporter",
        "postman.postman-for-vscode",
        "prisma.prisma",
        "redhat.vscode-yaml",
        "rioj7.command-variable",
        "ritwickdey.liveserver",
        "rvest.vs-code-prettier-eslint",
        "shardulm94.trailing-spaces",
        "steoates.autoimport",
        "stringham.move-ts",
        "ms-vscode.csharp",
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",
        "usernamehw.errorlens",
        "vincaslt.highlight-matching-tag",
        "vscode-icons-team.vscode-icons",
        "yoavbls.pretty-ts-errors",
        "yzhang.markdown-all-in-one",
        "yoavbls.pretty-typescript-errors",
        "wix.vscode-import-cost",
        "aaron-bond.better-comments",
        "gruntfuggly.todo-tree",
        "ashinzekene.nestjs",
        "orta.vscode-jest",
        "bradlc.vscode-tailwindcss",
        "heybourn.headwind",
        "prisma.prisma",
        "rangav.vscode-thunder-client",
        "humao.rest-client",
        "formulahendry.auto-close-tag",
        "formulahendry.auto-rename-tag",
        "ms-azuretools.vscode-docker",
        "mtxr.sqltools",
        "redhat.vscode-yaml",
        "mikestead.dotenv",
        "editorconfig.editorconfig",
        "eamodio.gitlens",
        "mhutchie.git-graph",
        "vivaxy.vscode-conventional-commits",
        "christian-kohler.path-intellisense",
        "christian-kohler.npm-intellisense",
        "chakrounanas.turbo-console-log",
        "pranaygp.vscode-css-peek",
        "alefragnani.bookmarks",
        "pkief.material-icon-theme",
        "github.copilot",
        "bierner.markdown-mermaid",
        "vitest.explorer",
        "google.geminicodeassist"
    ]
}
59
.vscode/extensions.json.bak
vendored
Normal file
@@ -0,0 +1,59 @@
{
    "recommendations": [
        // Linting & Formatting
        "dbaeumer.vscode-eslint",
        "esbenp.prettier-vscode",

        // Code Quality & Error Handling
        "usernamehw.errorlens",
        "yoavbls.pretty-typescript-errors",
        "aaron-bond.better-comments",
        "gruntfuggly.todo-tree",

        // Framework & Language Support
        "ashinzekene.nestjs",
        "dsznajder.es7-react-js-snippets",
        "orta.vscode-jest",
        "bradlc.vscode-tailwindcss",
        "heybourn.headwind",
        "prisma.prisma",

        // API Testing
        "rangav.vscode-thunder-client",
        "formulahendry.auto-close-tag",
        "formulahendry.auto-rename-tag",

        // Docker & DevOps
        "ms-azuretools.vscode-docker",
        "mtxr.sqltools",
        "redhat.vscode-yaml",
        "mikestead.dotenv",
        "editorconfig.editorconfig",

        // Git
        "eamodio.gitlens",
        "mhutchie.git-graph",
        "vivaxy.vscode-conventional-commits",

        // Path & Navigation
        "christian-kohler.path-intellisense",
        "christian-kohler.npm-intellisense",
        "csstools.postcss",

        // CSS Enhancement
        "pranaygp.vscode-css-peek",

        // Productivity
        "alefragnani.bookmarks",
        "chakrounanas.turbo-console-log",
        "wallabyjs.console-ninja",

        // Icons & Theme
        "pkief.material-icon-theme",
        "bierner.markdown-mermaid"

        // AI Assistance (Optional - pick one)
        // "github.copilot",
        // "tabnine.tabnine-vscode"
    ]
}
13
.vscode/settings.json
vendored
@@ -1,12 +1 @@
{
    "editor.formatOnSave": true,
    "editor.codeActionsOnSave": {
        "source.fixAll.eslint": "explicit"
    },
    "eslint.validate": [
        "javascript",
        "javascriptreact",
        "typescript",
        "typescriptreact"
    ]
}
559
.vscode/settings.json.bak
vendored
Normal file
@@ -0,0 +1,559 @@
{
  // ========================================
  // EDITOR SETTINGS
  // ========================================

  // Code font size
  "editor.fontSize": 14,

  // Line height
  "editor.lineHeight": 1.6,

  // Vertical rulers at columns 80 and 120
  "editor.rulers": [80, 120],

  // Enable the minimap on the right
  "editor.minimap.enabled": true,

  // Render whitespace and tabs at word boundaries
  "editor.renderWhitespace": "boundary",

  // Enable bracket pair colorization
  "editor.bracketPairColorization.enabled": true,

  // Show bracket pair guide lines
  "editor.guides.bracketPairs": "active",

  // Smooth scrolling
  "editor.smoothScrolling": true,

  // Cursor animation
  "editor.cursorBlinking": "smooth",
  "editor.cursorSmoothCaretAnimation": "on",

  // Show breadcrumbs at the top
  "breadcrumbs.enabled": true,

  // Word wrap at the window edge
  "editor.wordWrap": "on",

  // ========================================
  // FORMAT ON SAVE
  // ========================================

  // Format code automatically on save
  "editor.formatOnSave": true,

  // Format code on paste
  "editor.formatOnPaste": true,

  // Use Prettier as the default formatter
  "editor.defaultFormatter": "esbenp.prettier-vscode",

  // Per-language formatter overrides
  "[javascript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[typescript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[json]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[jsonc]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[html]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[css]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[scss]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[markdown]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },

  // ========================================
  // CODE ACTION ON SAVE
  // ========================================

  // Run ESLint fixes and organize imports on save
  "editor.codeActionsOnSave": {
    "source.fixAll.eslint": "explicit",
    "source.organizeImports": "explicit",
    "source.addMissingImports": "explicit"
  },

  // ========================================
  // PRETTIER SETTINGS
  // ========================================

  // Use single quotes instead of double quotes
  "prettier.singleQuote": true,

  // Use semicolons at the end of statements
  "prettier.semi": true,

  // Tab width of 2 spaces
  "prettier.tabWidth": 2,

  // Use spaces instead of tabs
  "prettier.useTabs": false,

  // Trailing commas where valid in ES5 (objects, arrays, etc.)
  "prettier.trailingComma": "es5",

  // Maximum line width before wrapping
  "prettier.printWidth": 80,

  // Always wrap arrow function parameters in parentheses
  "prettier.arrowParens": "always",

  // Use LF (Line Feed) instead of CRLF
  "prettier.endOfLine": "lf",

  // ========================================
  // ESLINT SETTINGS
  // ========================================

  // Enable ESLint
  "eslint.enable": true,

  // Run ESLint on these file types
  "eslint.validate": [
    "javascript",
    "javascriptreact",
    "typescript",
    "typescriptreact"
  ],

  // Show ESLint status in the status bar
  "eslint.alwaysShowStatus": true,

  // ========================================
  // ERROR LENS SETTINGS
  // ========================================

  // Enable Error Lens
  "errorLens.enabled": true,

  // Show errors, warnings, and info
  "errorLens.enabledDiagnosticLevels": ["error", "warning", "info"],

  // Padding between the message and the code
  "errorLens.padding": "0 1ch",

  // Message template
  "errorLens.messageTemplate": "$message",

  // Show the message at the end of the line
  "errorLens.messageEnabled": true,

  // ========================================
  // TAILWIND CSS SETTINGS
  // ========================================

  // Enable Emmet completions for Tailwind
  "tailwindCSS.emmetCompletions": true,

  // Show color previews
  "tailwindCSS.colorDecorators": true,

  // Enable suggestions
  "tailwindCSS.suggestions": true,

  // Languages that get Tailwind IntelliSense
  "tailwindCSS.includeLanguages": {
    "typescript": "javascript",
    "typescriptreact": "javascript"
  },

  // ========================================
  // HEADWIND (Tailwind Class Sorter)
  // ========================================

  // Run Headwind on save
  "headwind.runOnSave": true,

  // ========================================
  // AUTO CLOSE/RENAME TAG
  // ========================================

  // Enable auto close tag for these languages
  "auto-close-tag.activationOnLanguage": [
    "html",
    "xml",
    "php",
    "javascript",
    "javascriptreact",
    "typescript",
    "typescriptreact"
  ],

  // Enable auto rename tag for these languages
  "auto-rename-tag.activationOnLanguage": [
    "html",
    "xml",
    "php",
    "javascript",
    "javascriptreact",
    "typescript",
    "typescriptreact"
  ],

  // ========================================
  // BETTER COMMENTS
  // ========================================

  // Colors and styles for comment tags
  "better-comments.tags": [
    {
      "tag": "!",
      "color": "#FF2D00",
      "strikethrough": false,
      "underline": false,
      "backgroundColor": "transparent",
      "bold": false,
      "italic": false
    },
    {
      "tag": "?",
      "color": "#3498DB",
      "strikethrough": false,
      "underline": false,
      "backgroundColor": "transparent",
      "bold": false,
      "italic": false
    },
    {
      "tag": "//",
      "color": "#474747",
      "strikethrough": true,
      "underline": false,
      "backgroundColor": "transparent",
      "bold": false,
      "italic": false
    },
    {
      "tag": "todo",
      "color": "#FF8C00",
      "strikethrough": false,
      "underline": false,
      "backgroundColor": "transparent",
      "bold": false,
      "italic": false
    },
    {
      "tag": "*",
      "color": "#98C379",
      "strikethrough": false,
      "underline": false,
      "backgroundColor": "transparent",
      "bold": false,
      "italic": false
    }
  ],

  // ========================================
  // TODO TREE
  // ========================================

  // Keywords to highlight
  "todo-tree.general.tags": ["TODO", "FIXME", "BUG", "HACK", "NOTE", "XXX"],

  // Highlight TODOs in code
  "todo-tree.highlights.enabled": true,

  // Do not show the TODO tree in the explorer view
  "todo-tree.tree.showInExplorer": false,

  // Default highlight style
  "todo-tree.highlights.defaultHighlight": {
    "foreground": "black",
    "type": "text",
    "opacity": 50
  },

  // Per-tag colors
  "todo-tree.highlights.customHighlight": {
    "TODO": {
      "icon": "check",
      "iconColour": "#FF8C00",
      "foreground": "#FF8C00"
    },
    "FIXME": {
      "icon": "alert",
      "iconColour": "#FF2D00",
      "foreground": "#FF2D00"
    },
    "BUG": {
      "icon": "bug",
      "iconColour": "#FF2D00",
      "foreground": "#FF2D00"
    },
    "NOTE": {
      "icon": "note",
      "iconColour": "#3498DB",
      "foreground": "#3498DB"
    }
  },

  // ========================================
  // GITLENS SETTINGS
  // ========================================

  // Show Git blame in the status bar
  "gitlens.statusBar.enabled": true,

  // Show current line blame
  "gitlens.currentLine.enabled": true,

  // Current line blame format
  "gitlens.currentLine.format": "${author}, ${agoOrDate}",

  // Show code lens (Git info above functions)
  "gitlens.codeLens.enabled": true,

  // Show blame annotations on hover
  "gitlens.hovers.enabled": true,

  // ========================================
  // GIT SETTINGS
  // ========================================

  // Enable Git
  "git.enabled": true,

  // Auto fetch every 180 seconds
  "git.autofetch": true,
  "git.autofetchPeriod": 180,

  // Skip the confirmation dialog before sync
  "git.confirmSync": false,

  // Enable smart commit
  "git.enableSmartCommit": true,

  // ========================================
  // PATH INTELLISENSE
  // ========================================

  // Path alias mappings
  "path-intellisense.mappings": {
    "@": "${workspaceFolder}/src"
  },

  // ========================================
  // IMPORT COST
  // ========================================

  // Show import sizes
  "importCost.showCalculatingDecoration": true,

  // Size thresholds (KB) for large/medium/small import warnings
  "importCost.largePackageSize": 100,
  "importCost.mediumPackageSize": 50,
  "importCost.smallPackageSize": 20,

  // ========================================
  // JAVASCRIPT/TYPESCRIPT
  // ========================================

  // Auto imports
  "javascript.suggest.autoImports": true,
  "typescript.suggest.autoImports": true,

  // Update imports on file move
  "javascript.updateImportsOnFileMove.enabled": "always",
  "typescript.updateImportsOnFileMove.enabled": "always",

  // Inlay hints
  "javascript.inlayHints.parameterNames.enabled": "all",
  "typescript.inlayHints.parameterNames.enabled": "all",
  "javascript.inlayHints.functionLikeReturnTypes.enabled": true,
  "typescript.inlayHints.functionLikeReturnTypes.enabled": true,

  // ========================================
  // EMMET
  // ========================================

  // Enable Emmet in JSX/TSX
  "emmet.includeLanguages": {
    "javascript": "javascriptreact",
    "typescript": "typescriptreact"
  },

  // Trigger Emmet expansion with Tab
  "emmet.triggerExpansionOnTab": true,

  // ========================================
  // FILES
  // ========================================

  // Auto save
  "files.autoSave": "onFocusChange",

  // Trim trailing whitespace on save
  "files.trimTrailingWhitespace": true,

  // Insert a final newline at the end of files
  "files.insertFinalNewline": true,

  // Encoding
  "files.encoding": "utf8",

  // Line ending
  "files.eol": "\n",

  // Exclude files/folders from the explorer
  "files.exclude": {
    "**/.git": true,
    "**/.DS_Store": true,
    "**/node_modules": true,
    "**/.next": true,
    "**/dist": true,
    "**/build": true,
    "**/.turbo": true
  },

  // ========================================
  // SEARCH
  // ========================================

  // Exclude files/folders from search
  "search.exclude": {
    "**/node_modules": true,
    "**/dist": true,
    "**/build": true,
    "**/.next": true,
    "**/.turbo": true,
    "**/coverage": true,
    "**/.git": true,
    "**/yarn.lock": true,
    "**/package-lock.json": true,
    "**/pnpm-lock.yaml": true
  },

  // ========================================
  // TERMINAL
  // ========================================

  // Terminal font size
  "terminal.integrated.fontSize": 13,

  // Terminal line height
  "terminal.integrated.lineHeight": 1.2,

  // Smooth scrolling
  "terminal.integrated.smoothScrolling": true,

  // ========================================
  // WORKBENCH
  // ========================================

  // Icon theme
  "workbench.iconTheme": "material-icon-theme",

  // Color theme (pick your own)
  // "workbench.colorTheme": "One Dark Pro",

  // Show the activity bar
  "workbench.activityBar.location": "default",

  // Tree indent
  "workbench.tree.indent": 15,

  // Smooth scrolling
  "workbench.list.smoothScrolling": true,

  // ========================================
  // EXPLORER
  // ========================================

  // Compact folders
  "explorer.compactFolders": false,

  // Confirm before delete
  "explorer.confirmDelete": true,

  // Confirm drag and drop
  "explorer.confirmDragAndDrop": false,

  // ========================================
  // JEST
  // ========================================

  // Auto run tests
  "jest.autoRun": "off",

  // Show coverage overlay
  "jest.showCoverageOnLoad": false,

  // ========================================
  // DOCKER
  // ========================================

  // Format Docker files
  "docker.languageserver.formatter.ignoreMultilineInstructions": true,

  // ========================================
  // YAML
  // ========================================

  // Schemas for YAML validation
  "yaml.schemas": {
    "https://json.schemastore.org/github-workflow.json": ".github/workflows/*.{yml,yaml}",
    "https://json.schemastore.org/github-action.json": "action.{yml,yaml}",
    "https://json.schemastore.org/prettierrc.json": ".prettierrc.{yml,yaml}"
  },

  // Format YAML files
  "yaml.format.enable": true,

  // ========================================
  // CONSOLE NINJA
  // ========================================

  // Console Ninja feature set
  "console-ninja.featureSet": "Community",

  // ========================================
  // REST CLIENT
  // ========================================

  // Timeout for HTTP requests (milliseconds)
  "rest-client.timeoutinmilliseconds": 30000,

  // Show each response in a separate tab
  "rest-client.showResponseInDifferentTab": true,

  // ========================================
  // SECURITY
  // ========================================

  // Workspace trust behavior for untrusted files
  "security.workspace.trust.untrustedFiles": "open",

  // ========================================
  // PERFORMANCE
  // ========================================

  // Memory limit for large files (MB)
  "files.maxMemoryForLargeFilesMB": 4096,

  // Disable telemetry
  "telemetry.telemetryLevel": "off"
}
81
.vscode/tasks.json.bak
vendored
Normal file
@@ -0,0 +1,81 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Backend: Dev",
      "type": "shell",
      "command": "pnpm run start:dev",
      "options": {
        "cwd": "${workspaceFolder}/backend"
      },
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "problemMatcher": [],
      "presentation": {
        "reveal": "always",
        "panel": "new"
      }
    },
    {
      "label": "Backend: Build",
      "type": "shell",
      "command": "pnpm run build",
      "options": {
        "cwd": "${workspaceFolder}/backend"
      },
      "group": "build",
      "problemMatcher": [],
      "presentation": {
        "reveal": "always",
        "panel": "new"
      }
    },
    {
      "label": "Backend: Lint",
      "type": "shell",
      "command": "pnpm run lint",
      "options": {
        "cwd": "${workspaceFolder}/backend"
      },
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "problemMatcher": [],
      "presentation": {
        "reveal": "always",
        "panel": "new"
      }
    },
    {
      "label": "Backend: Test",
      "type": "shell",
      "command": "pnpm run test",
      "options": {
        "cwd": "${workspaceFolder}/backend"
      },
      "group": "test",
      "problemMatcher": [],
      "presentation": {
        "reveal": "always",
        "panel": "new"
      }
    },
    {
      "label": "Backend: Test E2E",
      "type": "shell",
      "command": "pnpm run test:e2e",
      "options": {
        "cwd": "${workspaceFolder}/backend"
      },
      "group": "test",
      "problemMatcher": [],
      "presentation": {
        "reveal": "always",
        "panel": "new"
      }
    }
  ]
}
38
2git.ps1
Normal file
@@ -0,0 +1,38 @@
param([string]$Message = "Backup")

$Timestamp = Get-Date -Format "yyMMdd:HHmm"
$CommitMsg = "$Timestamp $Message"

Write-Host "📦 $CommitMsg" -ForegroundColor Cyan

git add .

# Check whether there is anything to commit
$status = git status --porcelain
if (-not $status) {
    Write-Host "⚠️ Nothing to commit" -ForegroundColor Yellow
    pause
    exit
}

git commit -m $CommitMsg
if ($LASTEXITCODE -ne 0) {
    Write-Host "❌ Commit failed" -ForegroundColor Red
    pause
    exit 1
}

Write-Host "🚀 Pushing to Gitea..." -ForegroundColor Cyan
git push origin main
if ($LASTEXITCODE -ne 0) {
    Write-Host "❌ Push to Gitea failed" -ForegroundColor Red
}

Write-Host "🚀 Pushing to GitHub..." -ForegroundColor Cyan
git push github main
if ($LASTEXITCODE -ne 0) {
    Write-Host "❌ Push to GitHub failed" -ForegroundColor Red
}

Write-Host "✅ Done!" -ForegroundColor Green
pause
148
CHANGELOG.md
Normal file
@@ -0,0 +1,148 @@
# Version History

## [Unreleased]

### In Progress

- E2E Testing & UAT preparation
- Performance optimization and load testing
- Production deployment preparation

## 1.6.0 (2025-12-13)

### Summary

**Schema Refactoring Release** - Major restructuring of correspondence and RFA tables for improved data consistency.

### Database Schema Changes 💾

#### Breaking Changes ⚠️

- **`correspondence_recipients`**: FK changed from `correspondence_revisions(correspondence_id)` → `correspondences(id)`
- **`rfa_items`**: Column renamed `rfarev_correspondence_id` → `rfa_revision_id`

#### Schema Refactoring

- **`correspondences`**: Reordered columns, `discipline_id` now defined inline (no ALTER TABLE)
- **`correspondence_revisions`**:
  - Renamed: `title` → `subject`
  - Added: `body TEXT`, `remarks TEXT`, `schema_version INT`
  - Added Virtual Columns: `v_ref_project_id`, `v_doc_subtype`
- **`rfas`**:
  - Changed to Shared PK pattern (no AUTO_INCREMENT)
  - PK now FK to `correspondences(id)`
- **`rfa_revisions`**:
  - Removed: `correspondence_id` (uses `rfas.id` instead)
  - Renamed: `title` → `subject`
  - Added: `body TEXT`, `remarks TEXT`, `due_date DATETIME`, `schema_version INT`
  - Added Virtual Column: `v_ref_drawing_count`
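For illustration, the breaking changes above map onto a migration roughly like the following TypeORM sketch. This is a hedged example only - the FK constraint name is an assumption and this is not the shipped migration script:

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Illustrative sketch of the v1.6.0 breaking changes (not the shipped migration).
export class RefactorCorrespondenceRfa1702425600000 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    // correspondence_recipients: re-point the FK to the master table.
    // NOTE: "fk_recipients_revision" is an assumed constraint name.
    await queryRunner.query(`
      ALTER TABLE correspondence_recipients
        DROP FOREIGN KEY fk_recipients_revision,
        ADD CONSTRAINT fk_recipients_correspondence
          FOREIGN KEY (correspondence_id) REFERENCES correspondences (id)
    `);

    // rfa_items: column rename (MariaDB 10.5+ supports RENAME COLUMN).
    await queryRunner.query(`
      ALTER TABLE rfa_items
        RENAME COLUMN rfarev_correspondence_id TO rfa_revision_id
    `);
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`
      ALTER TABLE rfa_items
        RENAME COLUMN rfa_revision_id TO rfarev_correspondence_id
    `);
    // ...and reverse the FK change symmetrically.
  }
}
```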
### Documentation 📚

- Updated Data Dictionary to v1.6.0
- Updated schema SQL files (`lcbp3-v1.6.0-schema.sql`, seed files)

## 1.5.1 (2025-12-10)

### Summary

**Major Milestone: System Feature Complete (~95%)** - Ready for UAT and production deployment.

All core modules implemented and operational. Backend and frontend fully integrated with comprehensive admin tools.

### Backend Completed ✅

#### Core Infrastructure

- ✅ All 18 core modules implemented and tested
- ✅ JWT Authentication with Refresh Token mechanism
- ✅ RBAC 4-Level (Global, Organization, Project, Contract) using CASL
- ✅ Document Numbering with Redis Redlock + Optimistic Locking
- ✅ Workflow Engine (DSL-based Hybrid Engine with legacy support)
- ✅ Two-Phase File Storage with ClamAV Virus Scanning
- ✅ Global Audit Logging with Interceptor
- ✅ Health Monitoring & Metrics endpoints

#### Business Modules

- ✅ **Correspondence Module** - Master-Revision pattern, Workflow integration, References
- ✅ **RFA Module** - Full CRUD, Item management, Revision handling, Approval workflow
- ✅ **Drawing Module** - Separated into Shop Drawing & Contract Drawing
- ✅ **Transmittal Module** - Document transmittal tracking
- ✅ **Circulation Module** - Circulation sheet management
- ✅ **Elasticsearch Integration** - Direct indexing, Full-text search (95% complete)

#### Supporting Services

- ✅ **Notification System** - Email and LINE notification integration
- ✅ **Master Data Management** - Consolidated service for Organizations, Projects, Disciplines, Types
- ✅ **User Management** - CRUD, Assignments, Preferences, Soft Delete
- ✅ **Dashboard Service** - Statistics and reporting APIs
- ✅ **JSON Schema Validation** - Dynamic schema validation for documents

### Frontend Completed ✅

#### Application Structure

- ✅ All 15 frontend tasks (FE-001 to FE-015) completed
- ✅ Next.js 14 App Router with TypeScript
- ✅ Complete UI implementation (17 component groups, 22 Shadcn/UI components)
- ✅ TanStack Query for server state management
- ✅ Zustand for client state management
- ✅ React Hook Form + Zod for form validation
- ✅ Responsive layout (Desktop & Mobile)

#### End-User Modules

- ✅ **Authentication UI** - Login, Token Management, Session Sync
- ✅ **RBAC UI** - `<Can />` component for permission-based rendering
- ✅ **Correspondence UI** - List, Create, Detail views with file uploads
- ✅ **RFA UI** - List, Create, Item management
- ✅ **Drawing UI** - Contract & Shop drawing lists, Upload forms
- ✅ **Search UI** - Global search bar, Advanced filtering with Elasticsearch
- ✅ **Dashboard** - Real-time KPI cards, Activity feed, Pending tasks
- ✅ **Circulation UI** - Circulation sheet management with DataTable
- ✅ **Transmittal UI** - Transmittal tracking and management

#### Admin Panel (10 Routes)

- ✅ **Workflow Configuration** - DSL Editor, Visual Builder, Workflow Definition management
- ✅ **Document Numbering Config** - Template Editor, Token Tester, Sequence Viewer
- ✅ **User Management** - CRUD, Role assignments, Preferences
- ✅ **Organization Management** - Organization CRUD and hierarchy
- ✅ **Project Management** - Project and contract administration
- ✅ **Reference Data Management** - CRUD for Disciplines, Types, Categories (6 modules)
- ✅ **Security Administration** - RBAC Matrix, Roles, Active Sessions (2 modules)
- ✅ **Audit Logs** - Comprehensive audit log viewer
- ✅ **System Logs** - System log monitoring
- ✅ **Settings** - System configuration

### Database 💾

- ✅ Schema v1.5.1 with standardized audit columns (`created_at`, `updated_at`, `deleted_at`)
- ✅ Complete seed data for all master tables
- ✅ Migration scripts and patches (`patch-audit-columns.sql`)
- ✅ Data Dictionary v1.5.1 documentation

### Documentation 📚

- ✅ Complete specs/ reorganization to v1.5.1
- ✅ 21 requirements documents in `specs/01-requirements/`
- ✅ 17 ADRs (Architecture Decision Records) in `specs/05-decisions/`
- ✅ Implementation guides for Backend & Frontend
- ✅ Operations guides for critical features (Document Numbering)
- ✅ Comprehensive progress reports updated
- ✅ Task archiving to `specs/09-history/` (27 completed tasks)

### Bug Fixes 🐛

- 🐛 Fixed role selection bug in User Edit form (2025-12-09)
- 🐛 Fixed workflow permissions - 403 error on workflow action endpoints
- 🐛 Fixed TypeORM relation errors in RFA and Drawing services
- 🐛 Fixed token refresh infinite loop in authentication
- 🐛 Fixed database schema alignment issues (audit columns)
- 🐛 Fixed "drawings.map is not a function" by handling paginated responses
- 🐛 Fixed invalid refresh token error loop

### Changed 📝

- 📝 Updated progress reports to reflect ~95% backend, 100% frontend completion
- 📝 Aligned all TypeORM entities with schema v1.5.1
- 📝 Enhanced data dictionary with business rules
- 📝 Archived 27 completed task files to `specs/09-history/`

## 1.5.0 (2025-11-30)

### Summary

Initial spec-kit structure establishment and documentation organization.

### Changed

- Changed the version to 1.5.0
- Modified to Spec-kit
687
CONTRIBUTING.md
Normal file
@@ -0,0 +1,687 @@
# 📝 Contributing to LCBP3-DMS Specifications

> Guidelines for contributing to the LCBP3-DMS project's specification documents

Welcome to the specification contribution guide! This document will help you understand how to create, edit, and improve the project's specification documents effectively.

---

## 📚 Table of Contents

- [Specification Structure](#-specification-structure)
- [Writing Principles](#-writing-principles)
- [Contribution Workflow](#-contribution-workflow)
- [Templates & Guidelines](#-templates--guidelines)
- [Review Process](#-review-process)
- [Best Practices](#-best-practices)
- [Tools & Resources](#-tools--resources)

---

## 🗂️ Specification Structure

The project's specification documents are organized into nine main categories:

```
specs/
├── 00-overview/                  # Project overview (3 docs)
│   ├── README.md                 # Project overview
│   ├── glossary.md               # Technical glossary
│   └── quick-start.md            # Quick start guide
│
├── 01-requirements/              # System requirements (21 docs)
│   ├── README.md                 # Requirements overview
│   ├── 01-objectives.md          # Objectives
│   ├── 02-architecture.md        # Architecture
│   ├── 03-functional-requirements.md
│   ├── 03.1-project-management.md
│   ├── 03.2-correspondence.md
│   ├── 03.3-rfa.md
│   ├── 03.4-contract-drawing.md
│   ├── 03.5-shop-drawing.md
│   ├── 03.6-unified-workflow.md
│   ├── 03.7-transmittals.md
│   ├── 03.8-circulation-sheet.md
│   ├── 03.9-logs.md
│   ├── 03.10-file-handling.md
│   ├── 03.11-document-numbering.md
│   ├── 03.12-json-details.md
│   ├── 04-access-control.md
│   ├── 05-ui-ux.md
│   ├── 06-non-functional.md
│   └── 07-testing.md
│
├── 02-architecture/              # System architecture (4 docs)
│   ├── README.md
│   ├── system-architecture.md
│   ├── api-design.md
│   └── data-model.md
│
├── 03-implementation/            # Development plan (5 docs)
│   ├── README.md
│   ├── backend-guidelines.md
│   ├── frontend-guidelines.md
│   ├── testing-strategy.md
│   └── code-standards.md
│
├── 04-operations/                # Operations (9 docs)
│   ├── README.md
│   ├── deployment.md
│   ├── monitoring.md
│   └── ...
│
├── 05-decisions/                 # Architecture Decision Records (17 ADRs)
│   ├── README.md
│   ├── ADR-001-workflow-engine.md
│   ├── ADR-002-document-numbering.md
│   └── ...
│
├── 06-tasks/                     # Active tasks & progress (34 files)
│   ├── frontend-progress-report.md
│   ├── backend-progress-report.md
│   └── ...
│
├── 07-database/                  # Database schema (8 files)
│   ├── lcbp3-v1.5.1-schema.sql
│   ├── lcbp3-v1.5.1-seed.sql
│   ├── data-dictionary-v1.5.1.md
│   └── ...
│
└── 09-history/                   # Archived implementations (9 files)
    └── ...
```

### 📋 Document Categories

| Category              | Purpose                              | Owner                         |
| --------------------- | ------------------------------------ | ----------------------------- |
| **00-overview**       | Project overview and glossary        | Project Manager               |
| **01-requirements**   | Functional and system requirements   | Business Analyst + Tech Lead  |
| **02-architecture**   | Architecture and design              | Tech Lead + Architects        |
| **03-implementation** | Development and implementation plans | Development Team Leads        |
| **04-operations**     | Deployment and operations            | DevOps Team                   |
| **05-decisions**      | Architecture Decision Records        | Tech Lead + Senior Developers |
| **06-tasks**          | Active tasks & progress              | All Team Members              |
| **07-database**       | Database schema & seed data          | Backend Lead + DBA            |
| **09-history**        | Archived implementations             | Tech Lead                     |

---

## ✍️ Writing Principles

### 1. Language

- **Headings**: Thai or English (depending on context)
- **Main content**: Thai
- **Code examples**: English
- **Technical terms**: English (with a Thai explanation)

### 2. Writing Style

#### ✅ Correct

````markdown
## 3.2 Correspondence Management

The system must support managing correspondence between organizations, with the following features:

- **Create documents**: Users can create new documents
- **Edit documents**: Drafts can be edited
- **Send documents**: Documents are routed through the Workflow Engine

### Example API Endpoint

```typescript
POST /api/correspondences
{
  "subject": "Request for Information",
  "type_id": 1,
  "to_org_id": 2
}
```
````

#### ❌ Wrong

```markdown
## correspondence management

The system must support correspondence between organizations

- can create
- can edit
- can send
```

### 3. Document Structure

Every document should follow this structure:

```markdown
# [Document Title]

> Short description of the document

## Table of Contents (for long documents)

- [Section 1](#section-1)
- [Section 2](#section-2)

## Overview

[Overview of the topic]

## [Main Sections]

[Main content]

## Related Documents

- [Link to related spec 1]
- [Link to related spec 2]

---

**Last Updated**: 2025-11-30
**Version**: 1.4.5
**Status**: Draft | Review | Approved
```

---

## 🔄 Contribution Workflow

### Steps for Editing Specifications

#### 1. Create an Issue (if needed)

```bash
# In Gitea Issues
Title: [SPEC] Update Correspondence Requirements
Description:
- Add support for CC to multiple organizations
- Update the workflow diagram
- Add validation rules
```

#### 2. Create a Branch

```bash
# Naming convention
git checkout -b spec/[category]/[description]

# Examples
git checkout -b spec/requirements/update-correspondence
git checkout -b spec/architecture/add-workflow-diagram
git checkout -b spec/adr/file-storage-strategy
```

#### 3. Edit the Documents

```bash
# Edit the relevant files
vim specs/01-requirements/03.2-correspondence.md

# Check markdown syntax
pnpm run lint:markdown

# Preview (if available)
pnpm run preview:specs
```

#### 4. Commit Changes

```bash
# Commit message format
git commit -m "spec(requirements): update correspondence CC requirements

- Add support for multiple CC organizations
- Update workflow diagram
- Add validation rules for CC list
- Link to ADR-003

Refs: #123"

# Commit types:
# spec(category): for spec changes
# docs(category): for general documentation
# adr(number): for Architecture Decision Records
```

#### 5. Push and Create a Pull Request

```bash
git push origin spec/requirements/update-correspondence
```

**Pull Request Template:**

```markdown
## 📝 Specification Changes

### Category

- [ ] Requirements
- [ ] Architecture
- [ ] Implementation
- [ ] Operations
- [ ] ADR

### Type of Change

- [ ] New specification
- [ ] Update existing spec
- [ ] Fix typo/formatting
- [ ] Add diagram/example

### Description

[Describe the change]

### Impact Analysis

- **Affected Modules**: [list affected modules]
- **Breaking Changes**: Yes/No
- **Migration Required**: Yes/No

### Related Documents

- Related Specs: [links]
- Related Issues: #123
- Related ADRs: ADR-001

### Checklist

- [ ] Written in Thai (main content)
- [ ] Technical terms in English
- [ ] Code examples included (if relevant)
- [ ] Table of Contents updated
- [ ] Last Updated date updated
- [ ] Markdown syntax checked
- [ ] Internal links checked
- [ ] Related Documents added
```

---

## 📋 Templates & Guidelines

### Template: Functional Requirement

````markdown
## [Feature ID]. [Feature Name]

### Purpose

[Describe what this feature does and why it is needed]

### Requirements

#### [REQ-001] [Requirement Title]

**Priority**: High | Medium | Low
**Status**: Proposed | Approved | Implemented

**Description**:
[Requirement description]

**Acceptance Criteria**:

- [ ] Criterion 1
- [ ] Criterion 2
- [ ] Criterion 3

**Technical Notes**:

```typescript
// Example code or API
```

**Related**:

- Dependencies: [REQ-002], [REQ-003]
- Conflicts: None
- ADRs: [ADR-001]

### User Stories

```gherkin
Given [context]
When [action]
Then [expected result]
```

### UI/UX Requirements

[Screenshots, wireframes, or mockups]

### Non-Functional Requirements

- **Performance**: [e.g. Response time < 200ms]
- **Security**: [e.g. RBAC required]
- **Scalability**: [e.g. Support 100 concurrent users]

### Test Scenarios

1. **Happy Path**: [describe]
2. **Edge Cases**: [describe]
3. **Error Handling**: [describe]
````

### Template: Architecture Decision Record (ADR)

````markdown
# ADR-[NUMBER]: [Title]

**Status**: Proposed | Accepted | Deprecated | Superseded
**Date**: YYYY-MM-DD
**Deciders**: [names of decision makers]
**Technical Story**: [Issue/Epic link]

## Context and Problem Statement

[Describe the problem and its context]

## Decision Drivers

- [Driver 1]
- [Driver 2]
- [Driver 3]

## Considered Options

### Option 1: [Title]

**Pros**:

- [Pro 1]
- [Pro 2]

**Cons**:

- [Con 1]
- [Con 2]

### Option 2: [Title]

[Same as above]

## Decision Outcome

**Chosen option**: "[Option X]"

**Justification**:
[Explain the reasoning]

**Consequences**:

- **Positive**: [benefits]
- **Negative**: [drawbacks]
- **Neutral**: [other effects]

## Implementation

```typescript
// Example implementation
```

## Validation

[How to verify that this decision is correct]

## Related Decisions

- Supersedes: [ADR-XXX]
- Related to: [ADR-YYY]
- Conflicts with: None

## References

- [Link 1]
- [Link 2]
````

---

## 👀 Review Process

### Reviewer Checklist

#### ✅ Content Quality

- [ ] **Clarity**: Content is clear and easy to understand
- [ ] **Completeness**: Follows the full document structure
- [ ] **Accuracy**: Information is correct and factual
- [ ] **Consistency**: Consistent with other specs
- [ ] **Traceability**: Links to related documents

#### ✅ Technical Quality

- [ ] **Feasibility**: Can actually be implemented
- [ ] **Performance**: Performance implications considered
- [ ] **Security**: Security requirements specified
- [ ] **Scalability**: Supports growth
- [ ] **Maintainability**: Easy to maintain

#### ✅ Format & Style

- [ ] **Markdown Syntax**: No syntax errors
- [ ] **Language**: Thai used for the main content
- [ ] **Code Examples**: Syntax highlighting applied
- [ ] **Diagrams**: Clear and readable
- [ ] **Links**: All links work

### Review Levels

| Level                    | Reviewer        | Scope                           |
| ------------------------ | --------------- | ------------------------------- |
| **L1: Peer Review**      | Team Member     | Format, Clarity, Completeness   |
| **L2: Technical Review** | Tech Lead       | Technical Accuracy, Feasibility |
| **L3: Approval**         | Project Manager | Business Alignment, Impact      |

### Review Timeline

- **L1 Review**: 1-2 business days
- **L2 Review**: 2-3 business days
- **L3 Approval**: 1-2 business days

---

## 💡 Best Practices

### 1. Write Clearly and Specifically

#### ✅ Correct

```markdown
The system must support uploading PDF, DWG, DOCX, XLSX, and ZIP files,
each no larger than 50MB, and every file must be virus-scanned with ClamAV
```

#### ❌ Wrong

```markdown
The system must support uploading several file types
```

### 2. Use Diagrams and Examples

````markdown
### Workflow Diagram

```mermaid
graph LR
    A[Draft] --> B[Submitted]
    B --> C{Review}
    C -->|Approve| D[Approved]
    C -->|Reject| E[Rejected]
```
````

### 3. Reference Related Documents

```markdown
## Related Documents

- Requirements: [03.2-correspondence.md](./03.2-correspondence.md)
- Architecture: [system-architecture.md](../02-architecture/system-architecture.md)
- ADR: [ADR-001: Workflow Engine](../05-decisions/001-workflow-engine.md)
- Implementation: [Backend Plan](../../docs/2_Backend_Plan_V1_4_5.md)
```

### 4. Version Control

```markdown
---

**Document History**:

| Version | Date       | Author     | Changes         |
| ------- | ---------- | ---------- | --------------- |
| 1.0.0   | 2025-01-15 | John Doe   | Initial version |
| 1.1.0   | 2025-02-20 | Jane Smith | Add CC support  |
| 1.2.0   | 2025-03-10 | John Doe   | Update workflow |

**Current Version**: 1.2.0
**Status**: Approved
**Last Updated**: 2025-03-10
```

### 5. Use Consistent Terminology

Always refer to [glossary.md](./specs/00-overview/glossary.md)

```markdown
- ✅ Use: "Correspondence"
- ❌ Avoid: "Letter", "Document", "Communication"

- ✅ Use: "RFA" (Request for Approval)
- ❌ Avoid: "Approval Request", "Submit for Approval"
```

---

## 🛠️ Tools & Resources

### Markdown Tools

```bash
# Lint markdown files
pnpm run lint:markdown

# Fix markdown issues
pnpm run lint:markdown:fix

# Preview specs (if available)
pnpm run preview:specs
```

### Recommended VS Code Extensions

```json
{
  "recommendations": [
    "yzhang.markdown-all-in-one",
    "DavidAnson.vscode-markdownlint",
    "bierner.markdown-mermaid",
    "shd101wyy.markdown-preview-enhanced",
    "streetsidesoftware.code-spell-checker"
  ]
}
```

### Markdown Linting Rules

Create `.markdownlint.json`:

```json
{
  "default": true,
  "MD013": false,
  "MD033": false,
  "MD041": false
}
```

### Diagram Tools

- **Mermaid**: for flowcharts and sequence diagrams
- **PlantUML**: for UML diagrams
- **Draw.io**: for architecture diagrams

### Reference Documents

- [Markdown Guide](https://www.markdownguide.org/)
- [Mermaid Documentation](https://mermaid-js.github.io/)
- [ADR Template](https://github.com/joelparkerhenderson/architecture-decision-record)

---

## 📞 Getting Help

### Questions About Specs

1. **Check the existing documents**: [specs/](./specs/)
2. **See the glossary**: [specs/00-overview/glossary.md](./specs/00-overview/glossary.md)
3. **Search the issues**: [Gitea Issues](https://git.np-dms.work/lcbp3/lcbp3-dms/issues)
4. **Ask the team**: [contact channel]

### Reporting Problems

```markdown
**Title**: [SPEC] [Category] [Brief description]

**Description**:

- **Current State**: [describe the current problem]
- **Expected State**: [describe what it should be]
- **Affected Documents**: [list related documents]
- **Proposed Solution**: [suggest a fix]

**Labels**: spec, [category]
```

---

## 🎯 Quality Standards

### Definition of Done (DoD) for Spec Changes

- [x] Content is complete per the document structure
- [x] Main content written in Thai
- [x] Code examples included (if relevant)
- [x] Diagrams included (if needed)
- [x] Table of Contents updated
- [x] Last Updated date updated
- [x] Markdown linting passed
- [x] Internal links checked
- [x] Related Documents added
- [x] Passed L1 Peer Review
- [x] Passed L2 Technical Review
- [x] Received L3 Approval

---

## 📜 License & Copyright

All specification documents are the property of the LCBP3-DMS project.
**Internal Use Only** - do not distribute externally.

---

## 🙏 Acknowledgments

Thank you to everyone who contributes to the project's specification documents!

---

<div align="center">

**Questions?** Contact the Tech Lead or Project Manager

[Specs Directory](./specs) • [Main README](./README.md) • [Documentation](./docs)

</div>
611
README.md
Normal file
@@ -0,0 +1,611 @@
# 📋 LCBP3-DMS - Document Management System

> **Laem Chabang Port Phase 3 - Document Management System**
>
> A comprehensive project document management system for the Laem Chabang Port Phase 3 construction project

---

## 📈 Current Status (As of 2025-12-13)

**Overall Progress: ~95% Feature Complete - Production Ready**

- ✅ **Backend**: All 18 core modules implemented (~95%)
- ✅ **Frontend**: All 15 UI tasks completed (100%)
- ✅ **Database**: Schema v1.6.0 active with complete seed data
- ✅ **Documentation**: Comprehensive specs/ at v1.6.0
- ✅ **Admin Tools**: Workflow & Numbering configuration UIs complete
- 🔄 **Testing**: E2E tests and UAT in progress
- 📋 **Next**: Production deployment preparation

---

## 🎯 Project Overview

LCBP3-DMS is a project document management system designed to support large-scale construction projects, with a focus on:

- **Maximum security** - Security-first approach with 4-level RBAC
- **Data integrity** - Guaranteed through transactions and locking mechanisms
- **Flexibility** - A Unified Workflow Engine that supports complex workflows
- **Resilience** - Comprehensive resilience patterns and error handling

### ✨ Key Features

- 📝 **Correspondence Management** - Manage correspondence between organizations
- 🔧 **RFA Management** - Approval workflow for technical documents
- 📐 **Drawing Management** - Manage shop drawings and contract drawings
- 🔄 **Workflow Engine** - DSL-based workflows for approval processes
- 📊 **Advanced Search** - Document search powered by Elasticsearch
- 🔐 **RBAC 4-Level** - Fine-grained access control (Global, Organization, Project, Contract)
- 📁 **Two-Phase File Storage** - Transactional file handling with virus scanning
- 🔢 **Document Numbering** - Automatic, race-condition-safe document numbers (see the sketch below)
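As a rough illustration of the race-condition-safe numbering mentioned above, here is a minimal TypeScript sketch using Redis Redlock. It is illustrative only - the lock/key names are hypothetical, and the real `document-numbering` module combines Redlock with an optimistic-locking database update rather than a plain Redis counter:

```typescript
import Redis from 'ioredis';
import Redlock from 'redlock';

const redis = new Redis();
const redlock = new Redlock([redis], { retryCount: 10, retryDelay: 200 });

// Hypothetical sketch: reserve the next document number under a distributed lock,
// so two concurrent requests can never receive the same number.
async function nextDocumentNumber(contractId: number): Promise<number> {
  // Hold the lock for up to 5 seconds while the sequence is read and bumped.
  const lock = await redlock.acquire([`locks:doc-number:${contractId}`], 5000);
  try {
    // In the real system this step would be a DB read/update guarded by an
    // optimistic version column; INCR stands in for it here.
    return await redis.incr(`seq:doc-number:${contractId}`);
  } finally {
    await lock.release();
  }
}
```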
---

## 🏗️ System Architecture

### Technology Stack

#### Backend (NestJS)

```typescript
{
  "framework": "NestJS (TypeScript, ESM)",
  "database": "MariaDB 11.8",
  "orm": "TypeORM",
  "authentication": "JWT + Passport",
  "authorization": "CASL (RBAC)",
  "search": "Elasticsearch",
  "cache": "Redis",
  "queue": "BullMQ",
  "fileUpload": "Multer + ClamAV",
  "notification": "Nodemailer + n8n (LINE)",
  "documentation": "Swagger"
}
```

#### Frontend (Next.js)

```typescript
{
  "framework": "Next.js 14+ (App Router)",
  "language": "TypeScript",
  "styling": "Tailwind CSS",
  "components": "shadcn/ui",
  "stateManagement": {
    "server": "TanStack Query (React Query)",
    "forms": "React Hook Form + Zod",
    "ui": "useState/useReducer"
  },
  "testing": "Vitest + Playwright"
}
```

#### Infrastructure

- **Server**: QNAP TS-473A (AMD Ryzen V1500B, 32GB RAM)
- **Containerization**: Docker + Docker Compose (Container Station)
- **Reverse Proxy**: Nginx Proxy Manager
- **Version Control**: Gitea (Self-hosted)
- **Domain**: `np-dms.work`

### System Layout

```
┌─────────────────┐
│   Nginx Proxy   │ ← SSL/TLS Termination
│     Manager     │
└────────┬────────┘
         │
    ┌────┴────┬─────────────┬───────────┐
    │         │             │           │
┌───▼────┐ ┌──▼────┐ ┌──────▼──────┐ ┌──▼─────┐
│Next.js │ │NestJS │ │Elasticsearch│ │  n8n   │
│Frontend│ │Backend│ │   Search    │ │Workflow│
└────────┘ └──┬────┘ └─────────────┘ └────────┘
              │
    ┌─────────┼─────────┐
    │         │         │
┌───▼───┐ ┌───▼─┐   ┌───▼───┐
│MariaDB│ │Redis│   │ClamAV │
│  DB   │ │Cache│   │ Scan  │
└───────┘ └─────┘   └───────┘
```

---

## 🚀 Getting Started

### System Requirements

- **Node.js**: v20.x or later
- **pnpm**: v8.x or later
- **Docker**: v24.x or later
- **MariaDB**: 11.8
- **Redis**: 7.x

### Installation

#### 1. Clone the Repository

```bash
git clone https://git.np-dms.work/lcbp3/lcbp3-dms.git
cd lcbp3-dms
```

#### 2. Install Dependencies

```bash
# Install all dependencies (backend + frontend)
pnpm install
```

#### 3. Configure Environment Variables

**Backend:**

```bash
cd backend
cp .env.example .env
# Edit .env as needed
```

**Frontend:**

```bash
cd frontend
cp .env.local.example .env.local
# Edit .env.local as needed
```

#### 4. Set Up the Database

```bash
# Import schema
mysql -u root -p lcbp3_dev < docs/8_lcbp3_v1_4_5.sql

# Import seed data
mysql -u root -p lcbp3_dev < docs/8_lcbp3_v1_4_5_seed.sql
```

#### 5. Run the Development Servers

**Backend:**

```bash
cd backend
pnpm run start:dev
```

**Frontend:**

```bash
cd frontend
pnpm run dev
```

### Accessing the System

- **Frontend**: `http://localhost:3000`
- **Backend API**: `http://localhost:3001`
- **API Documentation**: `http://localhost:3001/api`

### Default Credentials

```
Superadmin:
  Username: admin@np-dms.work
  Password: (see seed data)
```

---

## 📁 Project Structure

```
lcbp3-dms/
├── backend/                    # 🔧 NestJS Backend
│   ├── src/
│   │   ├── common/             # Shared utilities, guards, decorators
│   │   ├── config/             # Configuration module
│   │   ├── database/           # Database entities & migrations
│   │   ├── modules/            # Feature modules (17 modules)
│   │   │   ├── auth/           # JWT Authentication
│   │   │   ├── user/           # User management & RBAC
│   │   │   ├── project/        # Project & Contract management
│   │   │   ├── correspondence/ # Correspondence module
│   │   │   ├── rfa/            # Request for Approval
│   │   │   ├── drawing/        # Contract & Shop Drawings
│   │   │   ├── workflow-engine/# DSL Workflow Engine
│   │   │   ├── document-numbering/ # Auto numbering
│   │   │   ├── transmittal/    # Transmittal management
│   │   │   ├── circulation/    # Circulation sheets
│   │   │   ├── search/         # Elasticsearch integration
│   │   │   ├── dashboard/      # Statistics & reporting
│   │   │   ├── notification/   # Email/LINE notifications
│   │   │   ├── monitoring/     # Health checks & metrics
│   │   │   ├── master/         # Master data management
│   │   │   ├── organizations/  # Organization management
│   │   │   └── json-schema/    # JSON Schema validation
│   │   └── main.ts
│   ├── test/                   # Unit & E2E tests
│   └── package.json
│
├── frontend/                   # 🎨 Next.js Frontend
│   ├── app/                    # App Router
│   │   ├── (admin)/            # Admin panel routes
│   │   │   └── admin/
│   │   │       ├── workflows/  # Workflow configuration
│   │   │       ├── numbering/  # Document numbering config
│   │   │       ├── users/      # User management
│   │   │       └── ...
│   │   ├── (auth)/             # Authentication pages
│   │   ├── (dashboard)/        # Main dashboard routes
│   │   │   ├── correspondences/
│   │   │   ├── rfas/
│   │   │   ├── drawings/
│   │   │   └── ...
│   │   └── api/                # API routes (NextAuth)
│   ├── components/             # React Components (15 groups)
│   │   ├── ui/                 # Shadcn/UI components
│   │   ├── layout/             # Layout components
│   │   ├── common/             # Shared components
│   │   ├── correspondences/    # Correspondence UI
│   │   ├── rfas/               # RFA UI
│   │   ├── drawings/           # Drawing UI
│   │   ├── workflows/          # Workflow builder
│   │   ├── numbering/          # Numbering config UI
│   │   ├── dashboard/          # Dashboard widgets
│   │   ├── search/             # Search components
│   │   └── ...
│   ├── lib/                    # Utilities & API clients
│   │   ├── api/                # API client functions
│   │   ├── services/           # Business logic services
│   │   └── stores/             # Zustand state stores
│   ├── types/                  # TypeScript definitions
│   └── package.json
│
├── specs/                      # 📘 Project Specifications (v1.5.1)
│   ├── 00-overview/            # Project overview & glossary
│   ├── 01-requirements/        # Functional requirements (21 docs)
│   ├── 02-architecture/        # System architecture
│   ├── 03-implementation/      # Implementation guidelines
│   ├── 04-operations/          # Deployment & operations
│   ├── 05-decisions/           # ADRs (17 decisions)
│   ├── 06-tasks/               # Active tasks & progress
│   ├── 07-database/            # Schema v1.5.1 & seed data
│   └── 09-history/             # Archived implementations
│
├── docs/                       # 📚 Legacy documentation
├── diagrams/                   # 📊 Architecture diagrams
├── infrastructure/             # 🐳 Docker & Deployment configs
│
├── .gemini/                    # 🤖 AI agent configuration
├── .agent/                     # Agent workflows
├── GEMINI.md                   # AI coding guidelines
├── CONTRIBUTING.md             # Contribution guidelines
├── CHANGELOG.md                # Version history
└── pnpm-workspace.yaml         # Monorepo configuration
```

---

## 📚 Documentation

### Primary Documents (specs/ folder)

| Document           | Description                          | Folder                     |
| ------------------ | ------------------------------------ | -------------------------- |
| **Overview**       | Project overview, glossary           | `specs/00-overview/`       |
| **Requirements**   | System and functional requirements   | `specs/01-requirements/`   |
| **Architecture**   | System architecture, ADRs            | `specs/02-architecture/`   |
| **Implementation** | Backend/Frontend development guides  | `specs/03-implementation/` |
| **Database**       | Schema v1.6.0 + seed data            | `specs/07-database/`       |

### Schema & Seed Data

```bash
# Import schema
mysql -u root -p lcbp3_dev < specs/07-database/lcbp3-v1.6.0-schema.sql

# Import seed data
mysql -u root -p lcbp3_dev < specs/07-database/lcbp3-v1.6.0-seed-basic.sql
```

### Legacy Documentation

Older documents live in the `docs/` folder.

---

## 🔧 Development Guidelines

### Coding Standards

#### Language

- **Code**: English
- **Comments & Documentation**: Thai

#### TypeScript Rules

```typescript
// ✅ Correct
interface User {
  user_id: number; // Property: snake_case
  firstName: string; // Variable: camelCase
  isActive: boolean; // Boolean: Verb + Noun
}

// ❌ Wrong
interface User {
  userId: number; // don't use camelCase for properties
  first_name: string; // don't use snake_case for variables
  active: boolean; // missing the Verb + Noun pattern
}
```

#### File Naming

```
user-service.ts   ✅ kebab-case
UserService.ts    ❌ PascalCase
user_service.ts   ❌ snake_case
```

### Git Workflow

```bash
# Create a feature branch
git checkout -b feature/correspondence-module

# Commit message format
git commit -m "feat(correspondence): add create correspondence endpoint"

# Types: feat, fix, docs, style, refactor, test, chore
```

### Testing

```bash
# Backend
cd backend
pnpm test          # Unit tests
pnpm test:e2e      # E2E tests
pnpm test:cov      # Coverage

# Frontend
cd frontend
pnpm test          # Unit tests
pnpm test:e2e      # Playwright E2E
```

---

## 🔐 Security

### Security Features

- ✅ **JWT Authentication** - Access & refresh tokens
- ✅ **RBAC 4-Level** - Global, Organization, Project, Contract (see the CASL sketch below)
- ✅ **Rate Limiting** - Brute-force protection
- ✅ **Virus Scanning** - ClamAV for every uploaded file
- ✅ **Input Validation** - Protection against SQL injection, XSS, and CSRF
- ✅ **Idempotency** - Prevents duplicate operations
- ✅ **Audit Logging** - Every action is recorded
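For a feel of how CASL expresses this kind of layered RBAC, here is a minimal sketch. The action, subject, and condition names are hypothetical - they are not the project's actual ability definitions:

```typescript
import { AbilityBuilder, createMongoAbility } from '@casl/ability';

// Hypothetical shape of a user's resolved role context.
interface UserContext {
  isGlobalAdmin: boolean;
  orgId: number;
  projectIds: number[];
}

// Minimal 4-level sketch: global admins can do anything; everyone else is
// scoped to their organization and their assigned projects/contracts.
function defineAbility(user: UserContext) {
  const { can, build } = new AbilityBuilder(createMongoAbility);

  if (user.isGlobalAdmin) {
    can('manage', 'all'); // Global level
  } else {
    can('read', 'Correspondence', { org_id: user.orgId }); // Organization level
    can('create', 'Rfa', { project_id: { $in: user.projectIds } }); // Project level
  }

  return build();
}
```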
### Security Best Practices

1. **Never store secrets in Git**

   - Use `.env` for development
   - Use `docker-compose.override.yml` (gitignored)

2. **Password Policy** (see the Zod sketch after this list)

   - Minimum length: 8 characters
   - Must contain uppercase, lowercase, number, and special character
   - Passwords rotate every 90 days

3. **File Upload**

   - Whitelisted file types: PDF, DWG, DOCX, XLSX, ZIP
   - Max size: 50MB
   - Every file is virus-scanned
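The password and upload rules above can be captured in Zod, which the frontend already uses for form validation. A minimal sketch - the schema names are hypothetical, not the project's actual validators:

```typescript
import { z } from 'zod';

// Hypothetical sketch of the password policy as a Zod schema.
const passwordSchema = z
  .string()
  .min(8, 'At least 8 characters')
  .regex(/[A-Z]/, 'Needs an uppercase letter')
  .regex(/[a-z]/, 'Needs a lowercase letter')
  .regex(/[0-9]/, 'Needs a number')
  .regex(/[^A-Za-z0-9]/, 'Needs a special character');

// Hypothetical sketch of the upload whitelist and size limit.
const ALLOWED_EXTENSIONS = ['pdf', 'dwg', 'docx', 'xlsx', 'zip'];
const MAX_FILE_SIZE = 50 * 1024 * 1024; // 50MB

const uploadSchema = z.object({
  filename: z
    .string()
    .refine(
      (name) =>
        ALLOWED_EXTENSIONS.includes(name.split('.').pop()?.toLowerCase() ?? ''),
      'File type not allowed'
    ),
  size: z.number().max(MAX_FILE_SIZE, 'File exceeds 50MB'),
});
```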
|
||||
---
|
||||
|
||||
## 🧪 Testing Strategy
|
||||
|
||||
### Test Pyramid
|
||||
|
||||
```
|
||||
/\
|
||||
/ \ E2E Tests (10%)
|
||||
/____\
|
||||
/ \ Integration Tests (20%)
|
||||
/________\
|
||||
/ \ Unit Tests (70%)
|
||||
/____________\
|
||||
```
|
||||
|
||||
### Coverage Goals
|
||||
|
||||
- **Backend**: 70%+ overall
|
||||
- Business Logic: 80%+
|
||||
- Controllers: 70%+
|
||||
- Utilities: 90%+
|
||||
- **Frontend**: 60%+ overall
|
||||
|
||||
---
|
||||
|
||||
## 📊 Monitoring & Observability
|
||||
|
||||
### Health Checks
|
||||
|
||||
```bash
|
||||
# Backend health
|
||||
curl http://localhost:3001/health
|
||||
|
||||
# Database health
|
||||
curl http://localhost:3001/health/db
|
||||
|
||||
# Redis health
|
||||
curl http://localhost:3001/health/redis
|
||||
```
|
||||
|
||||
### Metrics
|
||||
|
||||
- API Response Time
|
||||
- Error Rates
|
||||
- Cache Hit Ratio
|
||||
- Database Connection Pool
|
||||
- File Upload Performance
|
||||
|
||||
---
|
||||
|
||||
## 🚢 Deployment
|
||||
|
||||
### Production Deployment
|
||||
|
||||
```bash
|
||||
# Build backend
|
||||
cd backend
|
||||
pnpm run build
|
||||
|
||||
# Build frontend
|
||||
cd frontend
|
||||
pnpm run build
|
||||
|
||||
# Deploy with Docker Compose
|
||||
docker-compose -f docker-compose.yml up -d
|
||||
```
|
||||
|
||||
### Environment-specific Configs
|
||||
|
||||
- **Development**: `.env`, `docker-compose.override.yml`
|
||||
- **Staging**: Environment variables ใน Container Station
|
||||
- **Production**: Docker secrets หรือ Vault
|
||||
|
---

## 🤝 Contributing

Please read [CONTRIBUTING.md](./CONTRIBUTING.md) for details on:

- Code of Conduct
- Development Process
- Pull Request Process
- Coding Standards

---

## 📝 License

This project is **Internal Use Only** - copyright belongs to the LCBP3 Project.

---

## 👥 Team

- **Project Manager**: [Nattanin Peancharoen]
- **Tech Lead**: [Nattanin Peancharoen]
- **Backend Team**: [Nattanin Peancharoen]
- **Frontend Team**: [Nattanin Peancharoen]

---

## 📞 Support

For questions or issues, please contact:

- **Email**: <support@np-dms.work>
- **Internal Chat**: [specify channel]
- **Issue Tracker**: [Gitea Issues](https://git.np-dms.work/lcbp3/lcbp3-dms/issues)

---

## 🗺️ Roadmap

### Version 1.5.1 (Current - Dec 2025) ✅ **FEATURE COMPLETE**

**Backend (18 Modules - ~95%)**

- ✅ Core Infrastructure (Auth, RBAC, File Storage)
- ✅ Authentication & Authorization (JWT + CASL RBAC 4-Level)
- ✅ Correspondence Module (Master-Revision Pattern)
- ✅ RFA Module (Full CRUD + Workflow)
- ✅ Drawing Module (Contract + Shop Drawings)
- ✅ Workflow Engine (DSL-based Hybrid)
- ✅ Document Numbering (Redlock + Optimistic Locking)
- ✅ Search (Elasticsearch Direct Indexing)
- ✅ Transmittal & Circulation Modules
- ✅ Notification & Audit Systems
- ✅ Master Data Management
- ✅ User Management
- ✅ Dashboard & Monitoring
- ✅ Swagger API Documentation

**Frontend (15 Tasks - 100%)**

- ✅ Complete UI Implementation (17 component groups)
- ✅ All Business Modules (Correspondence, RFA, Drawings)
- ✅ Admin Panel (10 routes including Workflow & Numbering Config)
- ✅ Dashboard with Real-time Statistics
- ✅ Advanced Search UI
- ✅ RBAC Permission UI
- ✅ Responsive Layout (Desktop & Mobile)

**Documentation**

- ✅ Complete specs/ v1.6.0 (21 requirements, 17 ADRs)
- ✅ Database Schema v1.6.0 with seed data
- ✅ Implementation & Operations Guides

### Version 1.7.0 (Planned - Q1 2026)

**Production Enhancements**

- 📋 E2E Test Coverage (Playwright/Cypress)
- 📊 Advanced Reporting & Analytics Dashboard
- 🔔 Enhanced Notifications (Real-time WebSocket)
- 📈 Prometheus Metrics & Grafana Dashboards
- 🔍 Queue-based Elasticsearch Indexing
- 🚀 Performance Optimization & Caching Strategy
- 📱 Mobile App (React Native)

**Optional Improvements**

- 🤖 AI-powered Document Classification
- 📧 Advanced Email Templates
- 🔐 SSO Integration (LDAP/Active Directory)
- 📦 Bulk Operations & Import/Export Tools

---

## 📖 Additional Resources

### API Documentation

- Swagger UI: `http://localhost:3001/api`
- Postman Collection: [Download](./docs/postman/)

### Architecture Diagrams

- [System Architecture](./diagrams/system-architecture.md)
- [Database ERD](./diagrams/database-erd.md)
- [Workflow Engine](./diagrams/workflow-engine.md)

### Learning Resources

- [NestJS Documentation](https://docs.nestjs.com/)
- [Next.js Documentation](https://nextjs.org/docs)
- [TypeORM Documentation](https://typeorm.io/)

---

<div align="center">

**Built with ❤️ for LCBP3 Project**

[Documentation](./docs) • [Issues](https://git.np-dms.work/lcbp3/lcbp3-dms/issues) • [Changelog](./CHANGELOG.md)

</div>
@@ -1,4 +0,0 @@
{
  "singleQuote": true,
  "trailingComma": "all"
}
@@ -1,48 +0,0 @@
# File: Dockerfile
# Change log: (file created)

# --- STAGE 1: Builder ---
# Install dependencies and build the code
FROM node:18-alpine AS builder

WORKDIR /usr/src/app

# Copy package.json and the lock file
COPY package*.json ./

# Install dependencies (for the build)
RUN npm install

# Copy all source code
COPY . .

# Build application
RUN npm run build

# Keep only production dependencies (for the final stage)
RUN npm prune --production

# --- STAGE 2: Runner ---
# Small final image
FROM node:18-alpine

WORKDIR /usr/src/app

# (Security) Create a non-root user
RUN addgroup -S nestjs && adduser -S nestjs -G nestjs
USER nestjs

# Copy production dependencies (from Stage 1)
COPY --from=builder /usr/src/app/node_modules ./node_modules

# Copy build artifacts (from Stage 1)
COPY --from=builder /usr/src/app/dist ./dist

# Copy package.json (in case it is needed)
COPY package*.json ./

# Expose the port (read from an environment variable)
EXPOSE ${PORT:-3000}

# Run the application
CMD [ "node", "dist/main" ]
@@ -1,98 +0,0 @@
<p align="center">
  <a href="http://nestjs.com/" target="blank"><img src="https://nestjs.com/img/logo-small.svg" width="120" alt="Nest Logo" /></a>
</p>

[circleci-image]: https://img.shields.io/circleci/build/github/nestjs/nest/master?token=abc123def456
[circleci-url]: https://circleci.com/gh/nestjs/nest

<p align="center">A progressive <a href="http://nodejs.org" target="_blank">Node.js</a> framework for building efficient and scalable server-side applications.</p>
<p align="center">
  <a href="https://www.npmjs.com/~nestjscore" target="_blank"><img src="https://img.shields.io/npm/v/@nestjs/core.svg" alt="NPM Version" /></a>
  <a href="https://www.npmjs.com/~nestjscore" target="_blank"><img src="https://img.shields.io/npm/l/@nestjs/core.svg" alt="Package License" /></a>
  <a href="https://www.npmjs.com/~nestjscore" target="_blank"><img src="https://img.shields.io/npm/dm/@nestjs/common.svg" alt="NPM Downloads" /></a>
  <a href="https://circleci.com/gh/nestjs/nest" target="_blank"><img src="https://img.shields.io/circleci/build/github/nestjs/nest/master" alt="CircleCI" /></a>
  <a href="https://discord.gg/G7Qnnhy" target="_blank"><img src="https://img.shields.io/badge/discord-online-brightgreen.svg" alt="Discord"/></a>
  <a href="https://opencollective.com/nest#backer" target="_blank"><img src="https://opencollective.com/nest/backers/badge.svg" alt="Backers on Open Collective" /></a>
  <a href="https://opencollective.com/nest#sponsor" target="_blank"><img src="https://opencollective.com/nest/sponsors/badge.svg" alt="Sponsors on Open Collective" /></a>
  <a href="https://paypal.me/kamilmysliwiec" target="_blank"><img src="https://img.shields.io/badge/Donate-PayPal-ff3f59.svg" alt="Donate us"/></a>
  <a href="https://opencollective.com/nest#sponsor" target="_blank"><img src="https://img.shields.io/badge/Support%20us-Open%20Collective-41B883.svg" alt="Support us"></a>
  <a href="https://twitter.com/nestframework" target="_blank"><img src="https://img.shields.io/twitter/follow/nestframework.svg?style=social&label=Follow" alt="Follow us on Twitter"></a>
</p>
<!--[](https://opencollective.com/nest#backer)
[](https://opencollective.com/nest#sponsor)-->

## Description

[Nest](https://github.com/nestjs/nest) framework TypeScript starter repository.

## Project setup

```bash
$ npm install
```

## Compile and run the project

```bash
# development
$ npm run start

# watch mode
$ npm run start:dev

# production mode
$ npm run start:prod
```

## Run tests

```bash
# unit tests
$ npm run test

# e2e tests
$ npm run test:e2e

# test coverage
$ npm run test:cov
```

## Deployment

When you're ready to deploy your NestJS application to production, there are some key steps you can take to ensure it runs as efficiently as possible. Check out the [deployment documentation](https://docs.nestjs.com/deployment) for more information.

If you are looking for a cloud-based platform to deploy your NestJS application, check out [Mau](https://mau.nestjs.com), our official platform for deploying NestJS applications on AWS. Mau makes deployment straightforward and fast, requiring just a few simple steps:

```bash
$ npm install -g @nestjs/mau
$ mau deploy
```

With Mau, you can deploy your application in just a few clicks, allowing you to focus on building features rather than managing infrastructure.

## Resources

Check out a few resources that may come in handy when working with NestJS:

- Visit the [NestJS Documentation](https://docs.nestjs.com) to learn more about the framework.
- For questions and support, please visit our [Discord channel](https://discord.gg/G7Qnnhy).
- To dive deeper and get more hands-on experience, check out our official video [courses](https://courses.nestjs.com/).
- Deploy your application to AWS with the help of [NestJS Mau](https://mau.nestjs.com) in just a few clicks.
- Visualize your application graph and interact with the NestJS application in real-time using [NestJS Devtools](https://devtools.nestjs.com).
- Need help with your project (part-time to full-time)? Check out our official [enterprise support](https://enterprise.nestjs.com).
- To stay in the loop and get updates, follow us on [X](https://x.com/nestframework) and [LinkedIn](https://linkedin.com/company/nestjs).
- Looking for a job, or have a job to offer? Check out our official [Jobs board](https://jobs.nestjs.com).

## Support

Nest is an MIT-licensed open source project. It can grow thanks to the sponsors and support by the amazing backers. If you'd like to join them, please [read more here](https://docs.nestjs.com/support).

## Stay in touch

- Author - [Kamil Myśliwiec](https://twitter.com/kammysliwiec)
- Website - [https://nestjs.com](https://nestjs.com/)
- Twitter - [@nestframework](https://twitter.com/nestframework)

## License

Nest is [MIT licensed](https://github.com/nestjs/nest/blob/master/LICENSE).
@@ -1,62 +0,0 @@
# File: docker-compose.yml
# Change log: (file created)
# (Important: this file is imported or copied into the QNAP Container Station UI)

version: '3.8'

services:
  # ---------------------------------
  # Service 1: Backend (NestJS)
  # (Req 2.3)
  # ---------------------------------
  backend:
    build:
      context: ./backend # (assumes the Dockerfile lives in the backend folder)
      dockerfile: Dockerfile
    image: lcbp3-backend:1.3.0 # (image name)
    container_name: lcbp3-backend
    restart: unless-stopped

    # (Important) Define environment variables here (do not use .env)
    # (Req 6.5, 2.1)
    environment:
      # --- App Config ---
      - PORT=3000
      - NODE_ENV=production

      # --- Database (Req 2.4) ---
      # (points to the 'mariadb' service on the 'lcbp3' network)
      - DATABASE_HOST=mariadb
      - DATABASE_PORT=3306
      - DATABASE_USER=your_db_user # (must be changed)
      - DATABASE_PASSWORD=your_db_pass # (must be changed)
      - DATABASE_NAME=lcbp3_dms

      # --- Security (JWT) (Req 6.5) ---
      - JWT_SECRET=YOUR_VERY_STRONG_JWT_SECRET_KEY # (must be changed)
      - JWT_EXPIRATION_TIME=3600s # (e.g. 1 hour)

      # --- Phase 4 Services ---
      - ELASTICSEARCH_URL=http://elasticsearch:9200 # (points to the ES service, if present)
      - N8N_WEBHOOK_URL=http://n8n:5678/webhook/your-webhook-id # (points to N8N)

    # (Important) Join the shared network (Req 2.1)
    networks:
      - lcbp3

    # (Deploy) Health check settings
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:3000/api/v1/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s # (wait for the app to start first)

# ---------------------------------
# Shared network (Req 2.1)
# (must be created in QNAP beforehand, or created together with this stack)
# ---------------------------------
networks:
  lcbp3:
    external: true # (if already created)
    # name: lcbp3 # (if Compose should create it)
@@ -1,35 +0,0 @@
// @ts-check
import eslint from '@eslint/js';
import eslintPluginPrettierRecommended from 'eslint-plugin-prettier/recommended';
import globals from 'globals';
import tseslint from 'typescript-eslint';

export default tseslint.config(
  {
    ignores: ['eslint.config.mjs'],
  },
  eslint.configs.recommended,
  ...tseslint.configs.recommendedTypeChecked,
  eslintPluginPrettierRecommended,
  {
    languageOptions: {
      globals: {
        ...globals.node,
        ...globals.jest,
      },
      sourceType: 'commonjs',
      parserOptions: {
        projectService: true,
        tsconfigRootDir: import.meta.dirname,
      },
    },
  },
  {
    rules: {
      '@typescript-eslint/no-explicit-any': 'off',
      '@typescript-eslint/no-floating-promises': 'warn',
      '@typescript-eslint/no-unsafe-argument': 'warn',
      "prettier/prettier": ["error", { endOfLine: "auto" }],
    },
  },
);
@@ -1,8 +0,0 @@
{
  "$schema": "https://json.schemastore.org/nest-cli",
  "collection": "@nestjs/schematics",
  "sourceRoot": "src",
  "compilerOptions": {
    "deleteOutDir": true
  }
}
@@ -1,98 +0,0 @@
{
  "name": "backend",
  "version": "0.0.1",
  "description": "",
  "author": "",
  "private": true,
  "license": "UNLICENSED",
  "scripts": {
    "build": "nest build",
    "format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
    "start": "nest start",
    "start:dev": "nest start --watch",
    "start:debug": "nest start --debug --watch",
    "start:prod": "node dist/main",
    "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
    "test": "jest",
    "test:watch": "jest --watch",
    "test:cov": "jest --coverage",
    "test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
    "test:e2e": "jest --config ./test/jest-e2e.json"
  },
  "dependencies": {
    "@elastic/elasticsearch": "^9.2.0",
    "@nestjs/cache-manager": "^3.0.1",
    "@nestjs/common": "^11.0.1",
    "@nestjs/config": "^4.0.2",
    "@nestjs/core": "^11.0.1",
    "@nestjs/elasticsearch": "^11.1.0",
    "@nestjs/jwt": "^11.0.1",
    "@nestjs/passport": "^11.0.5",
    "@nestjs/platform-express": "^11.1.9",
    "@nestjs/schedule": "^6.0.1",
    "@nestjs/swagger": "^11.2.1",
    "@nestjs/typeorm": "^11.0.0",
    "@types/nodemailer": "^7.0.3",
    "@types/uuid": "^10.0.0",
    "bcrypt": "^6.0.0",
    "cache-manager": "^7.2.4",
    "casl": "^0.2.0",
    "class-transformer": "^0.5.1",
    "class-validator": "^0.14.2",
    "helmet": "^8.1.0",
    "multer": "^2.0.2",
    "mysql2": "^3.15.3",
    "nodemailer": "^7.0.10",
    "passport": "^0.7.0",
    "passport-jwt": "^4.0.1",
    "rate-limiter-flexible": "^8.2.1",
    "reflect-metadata": "^0.2.2",
    "rxjs": "^7.8.1",
    "typeorm": "^0.3.27",
    "uuid": "^13.0.0"
  },
  "devDependencies": {
    "@eslint/eslintrc": "^3.2.0",
    "@eslint/js": "^9.18.0",
    "@nestjs/cli": "^11.0.0",
    "@nestjs/schematics": "^11.0.0",
    "@nestjs/testing": "^11.1.9",
    "@types/express": "^5.0.0",
    "@types/jest": "^30.0.0",
    "@types/multer": "^2.0.0",
    "@types/node": "^22.10.7",
    "@types/passport-jwt": "^4.0.1",
    "@types/supertest": "^6.0.2",
    "eslint": "^9.18.0",
    "eslint-config-prettier": "^10.0.1",
    "eslint-plugin-prettier": "^5.2.2",
    "globals": "^16.0.0",
    "jest": "^30.2.0",
    "prettier": "^3.4.2",
    "source-map-support": "^0.5.21",
    "supertest": "^7.1.4",
    "ts-jest": "^29.2.5",
    "ts-loader": "^9.5.2",
    "ts-node": "^10.9.2",
    "tsconfig-paths": "^4.2.0",
    "typescript": "^5.7.3",
    "typescript-eslint": "^8.20.0"
  },
  "jest": {
    "moduleFileExtensions": [
      "js",
      "json",
      "ts"
    ],
    "rootDir": "src",
    "testRegex": ".*\\.spec\\.ts$",
    "transform": {
      "^.+\\.(t|j)s$": "ts-jest"
    },
    "collectCoverageFrom": [
      "**/*.(t|j)s"
    ],
    "coverageDirectory": "../coverage",
    "testEnvironment": "node"
  }
}
@@ -1,22 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { AppController } from './app.controller';
import { AppService } from './app.service';

describe('AppController', () => {
  let appController: AppController;

  beforeEach(async () => {
    const app: TestingModule = await Test.createTestingModule({
      controllers: [AppController],
      providers: [AppService],
    }).compile();

    appController = app.get<AppController>(AppController);
  });

  describe('root', () => {
    it('should return "Hello World!"', () => {
      expect(appController.getHello()).toBe('Hello World!');
    });
  });
});
@@ -1,12 +0,0 @@
import { Controller, Get } from '@nestjs/common';
import { AppService } from './app.service';

@Controller()
export class AppController {
  constructor(private readonly appService: AppService) {}

  @Get()
  getHello(): string {
    return this.appService.getHello();
  }
}
@@ -1,10 +0,0 @@
import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';

@Module({
  imports: [],
  controllers: [AppController],
  providers: [AppService],
})
export class AppModule {}
@@ -1,8 +0,0 @@
import { Injectable } from '@nestjs/common';

@Injectable()
export class AppService {
  getHello(): string {
    return 'Hello World!';
  }
}
@@ -1,8 +0,0 @@
import { NestFactory } from '@nestjs/core';
import { AppModule } from './app.module';

async function bootstrap() {
  const app = await NestFactory.create(AppModule);
  await app.listen(process.env.PORT ?? 3000);
}
bootstrap();
@@ -1,25 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { INestApplication } from '@nestjs/common';
import request from 'supertest';
import { App } from 'supertest/types';
import { AppModule } from './../src/app.module';

describe('AppController (e2e)', () => {
  let app: INestApplication<App>;

  beforeEach(async () => {
    const moduleFixture: TestingModule = await Test.createTestingModule({
      imports: [AppModule],
    }).compile();

    app = moduleFixture.createNestApplication();
    await app.init();
  });

  it('/ (GET)', () => {
    return request(app.getHttpServer())
      .get('/')
      .expect(200)
      .expect('Hello World!');
  });
});
@@ -1,9 +0,0 @@
{
  "moduleFileExtensions": ["js", "json", "ts"],
  "rootDir": ".",
  "testEnvironment": "node",
  "testRegex": ".e2e-spec.ts$",
  "transform": {
    "^.+\\.(t|j)s$": "ts-jest"
  }
}
@@ -1,4 +0,0 @@
{
  "extends": "./tsconfig.json",
  "exclude": ["node_modules", "test", "dist", "**/*spec.ts"]
}
@@ -1,25 +0,0 @@
{
  "compilerOptions": {
    "module": "nodenext",
    "moduleResolution": "nodenext",
    "resolvePackageJsonExports": true,
    "esModuleInterop": true,
    "isolatedModules": true,
    "declaration": true,
    "removeComments": true,
    "emitDecoratorMetadata": true,
    "experimentalDecorators": true,
    "allowSyntheticDefaultImports": true,
    "target": "ES2023",
    "sourceMap": true,
    "outDir": "./dist",
    "baseUrl": "./",
    "incremental": true,
    "skipLibCheck": true,
    "strictNullChecks": true,
    "forceConsistentCasingInFileNames": true,
    "noImplicitAny": false,
    "strictBindCallApply": false,
    "noFallthroughCasesInSwitch": false
  }
}
12  backend/.editorconfig  Normal file
@@ -0,0 +1,12 @@
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

[*]
indent_style = space
indent_size = 2
end_of_line = crlf
charset = utf-8
trim_trailing_whitespace = false
insert_final_newline = false
@@ -1,4 +1,12 @@
 {
   "singleQuote": true,
-  "trailingComma": "all"
+  "semi": true,
+  "tabWidth": 2,
+  "useTabs": false,
+  "trailingComma": "es5",
+  "printWidth": 80,
+  "arrowParens": "always",
+  "endOfLine": "lf",
+  "bracketSpacing": true,
+  "jsxSingleQuote": false
 }

76  backend/Infrastructure Setup.yml  Normal file
@@ -0,0 +1,76 @@
version: '3.8'

services:
  # ---------------------------------------------------------------------------
  # Redis Service
  # Used for: data caching, session store, and message queue (for NestJS/BullMQ)
  # ---------------------------------------------------------------------------
  redis:
    image: redis:7-alpine
    container_name: lcbp3_redis
    restart: always
    command: redis-server --save 60 1 --loglevel warning --requirepass "${REDIS_PASSWORD:-redis_password}"
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    networks:
      - lcbp3_net
    healthcheck:
      test: ["CMD", "redis-cli", "-a", "${REDIS_PASSWORD:-redis_password}", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  # ---------------------------------------------------------------------------
  # Elasticsearch Service
  # Used for: full-text search and data analysis
  # ---------------------------------------------------------------------------
  elasticsearch:
    image: elasticsearch:8.11.1
    container_name: lcbp3_elasticsearch
    restart: always
    environment:
      - node.name=lcbp3_es01
      - cluster.name=lcbp3_es_cluster
      - discovery.type=single-node # single-node mode for Dev/Phase 0
      - bootstrap.memory_lock=true # lock memory for performance
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m" # heap size (raise to match machine resources)
      - xpack.security.enabled=false # security disabled temporarily for Phase 0 (enable in Production)
      - xpack.security.http.ssl.enabled=false
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - es_data:/usr/share/elasticsearch/data
    ports:
      - "9200:9200"
    networks:
      - lcbp3_net
    healthcheck:
      test: ["CMD-SHELL", "curl -f http://localhost:9200/_cluster/health || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 5

# ---------------------------------------------------------------------------
# Volumes Configuration
# Persistent data storage
# ---------------------------------------------------------------------------
volumes:
  redis_data:
    driver: local
    name: lcbp3_redis_vol
  es_data:
    driver: local
    name: lcbp3_es_vol

# ---------------------------------------------------------------------------
# Networks Configuration
# Internal network connecting the containers
# ---------------------------------------------------------------------------
networks:
  lcbp3_net:
    driver: bridge
72  backend/build-output.txt  Normal file
@@ -0,0 +1,72 @@

> backend@1.5.1 build
> nest build

documentation/template-playground/hbs-render.service.ts:1:28 - error TS2307: Cannot find module '@angular/core' or its corresponding type declarations.

1 import { Injectable } from '@angular/core';
                             ~~~~~~~~~~~~~~~
documentation/template-playground/hbs-render.service.ts:175:42 - error TS18046: 'error' is of type 'unknown'.

175 <p><strong>Error:</strong> ${error.message}</p>
                                 ~~~~~
documentation/template-playground/main.ts:1:40 - error TS2307: Cannot find module '@angular/platform-browser-dynamic' or its corresponding type declarations.

1 import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
                                         ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
documentation/template-playground/main.ts:8:12 - error TS7006: Parameter 'err' implicitly has an 'any' type.

8 .catch(err => console.error('Error starting template playground:', err));
         ~~~
documentation/template-playground/template-editor.service.ts:1:28 - error TS2307: Cannot find module '@angular/core' or its corresponding type declarations.

1 import { Injectable } from '@angular/core';
                             ~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.component.ts:1:69 - error TS2307: Cannot find module '@angular/core' or its corresponding type declarations.

1 import { Component, OnInit, ViewChild, ElementRef, OnDestroy } from '@angular/core';
                                                                      ~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.component.ts:2:28 - error TS2307: Cannot find module '@angular/common/http' or its corresponding type declarations.

2 import { HttpClient } from '@angular/common/http';
                             ~~~~~~~~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.module.ts:1:26 - error TS2307: Cannot find module '@angular/core' or its corresponding type declarations.

1 import { NgModule } from '@angular/core';
                           ~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.module.ts:2:31 - error TS2307: Cannot find module '@angular/platform-browser' or its corresponding type declarations.

2 import { BrowserModule } from '@angular/platform-browser';
                                ~~~~~~~~~~~~~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.module.ts:3:30 - error TS2307: Cannot find module '@angular/common' or its corresponding type declarations.

3 import { CommonModule } from '@angular/common';
                               ~~~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.module.ts:4:29 - error TS2307: Cannot find module '@angular/forms' or its corresponding type declarations.

4 import { FormsModule } from '@angular/forms';
                              ~~~~~~~~~~~~~~~~
documentation/template-playground/template-playground.module.ts:5:34 - error TS2307: Cannot find module '@angular/common/http' or its corresponding type declarations.

5 import { HttpClientModule } from '@angular/common/http';
                                   ~~~~~~~~~~~~~~~~~~~~~~
documentation/template-playground/zip-export.service.ts:1:28 - error TS2307: Cannot find module '@angular/core' or its corresponding type declarations.

1 import { Injectable } from '@angular/core';
                             ~~~~~~~~~~~~~~~
src/modules/rfa/rfa.service.ts:422:11 - error TS2339: Property 'returnToSequence' does not exist on type 'WorkflowActionDto'.

422 dto.returnToSequence
        ~~~~~~~~~~~~~~~~
src/modules/rfa/rfa.service.ts:435:37 - error TS2551: Property 'comments' does not exist on type 'WorkflowActionDto'. Did you mean 'comment'?

435 currentRouting.comments = dto.comments;
                                  ~~~~~~~~

  src/modules/correspondence/dto/workflow-action.dto.ts:29:3
    29 comment?: string;
       ~~~~~~~
    'comment' is declared here.

Found 15 error(s).

1416  backend/doc-output.txt  Normal file
File diff suppressed because it is too large
30  backend/docker-compose.test.yml  Normal file
@@ -0,0 +1,30 @@
services:
  mariadb_test:
    image: mariadb:11.8
    container_name: mariadb-test
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: Center#2025
      MYSQL_DATABASE: lcbp3_test
      MYSQL_USER: admin
      MYSQL_PASSWORD: Center2025
    ports:
      - '3307:3306'
    tmpfs:
      - /var/lib/mysql
    networks:
      - lcbp3-test-net

  redis_test:
    image: redis:7-alpine
    container_name: redis-test
    restart: always
    command: redis-server --requirepass "Center2025"
    ports:
      - '6380:6379'
    networks:
      - lcbp3-test-net

networks:
  lcbp3-test-net:
    driver: bridge
@@ -41,10 +41,24 @@ services:
    networks:
      - lcbp3-net

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.11.1
    container_name: lcbp3-elasticsearch
    environment:
      - discovery.type=single-node
      - xpack.security.enabled=false # security disabled for Dev convenience (must be enabled in Prod)
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    ports:
      - "9200:9200"
    volumes:
      - esdata:/usr/share/elasticsearch/data
    networks:
      - lcbp3-net

volumes:
  db_data:
  redis_data: # added volume
  esdata:

networks:
  lcbp3-net:
    driver: bridge
421  backend/e2e-output.txt  Normal file
@@ -0,0 +1,421 @@

> backend@1.5.1 test:e2e
> jest --config ./test/jest-e2e.json

[Nest] 13440  - 12/09/2025, 8:34:55 AM   ERROR [TypeOrmModule] Unable to connect to the database. Retrying (1)...
AggregateError:
    at internalConnectMultiple (node:net:1134:18)
    at afterConnectMultiple (node:net:1715:7)
    at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17)
[Nest] 12240  - 12/09/2025, 8:34:55 AM   ERROR [TypeOrmModule] Unable to connect to the database. Retrying (1)...
AggregateError:
    at internalConnectMultiple (node:net:1134:18)
    at afterConnectMultiple (node:net:1715:7)
    at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17)
[Nest] 41780  - 12/09/2025, 8:34:55 AM   ERROR [TypeOrmModule] Unable to connect to the database. Retrying (1)...
AggregateError:
    at internalConnectMultiple (node:net:1134:18)
    at afterConnectMultiple (node:net:1715:7)
    at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17)

  ● Cannot log after tests are done. Did you forget to wait for something async in your test?
    Attempted to log "AggregateError:
        at internalConnectMultiple (node:net:1134:18)
        at afterConnectMultiple (node:net:1715:7)
        at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
      code: 'ECONNREFUSED',
      [errors]: [
        Error: connect ECONNREFUSED ::1:6379
            at createConnectionError (node:net:1678:14)
            at afterConnectMultiple (node:net:1708:16)
            at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '::1',
          port: 6379
        },
        Error: connect ECONNREFUSED 127.0.0.1:6379
            at createConnectionError (node:net:1678:14)
            at afterConnectMultiple (node:net:1708:16)
            at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '127.0.0.1',
          port: 6379
        }
      ]
    }".

    at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
      code: 'ECONNREFUSED',
      [errors]: [
        Error: connect ECONNREFUSED ::1:6379
            at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '::1',
          port: 6379
        },
        Error: connect ECONNREFUSED 127.0.0.1:6379
            at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '127.0.0.1',
          port: 6379
        }
      ]
    }".
    at console.error (../node_modules/@jest/console/build/index.js:124:10)
    at Queue.emit (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue-base.ts:129:17)
    at Queue.emit (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue.ts:192:18)
    at RedisConnection.<anonymous> (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue-base.ts:75:56)
    at EventEmitter.RedisConnection.handleClientError (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/redis-connection.ts:121:12)
    at EventEmitter.silentEmit (../../node_modules/.pnpm/ioredis@5.8.2/node_modules/ioredis/built/Redis.js:484:30)
    at Socket.<anonymous> (../../node_modules/.pnpm/ioredis@5.8.2/node_modules/ioredis/built/redis/event_handler.js:221:14)

FAIL test/app.e2e-spec.ts (7.608 s)
  ● Console

    console.error
      Redis Connection Error: AggregateError:
          at internalConnectMultiple (node:net:1134:18)
          at afterConnectMultiple (node:net:1715:7)
          at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
        code: 'ECONNREFUSED',
        [errors]: [
          Error: connect ECONNREFUSED ::1:6379
              at createConnectionError (node:net:1678:14)
              at afterConnectMultiple (node:net:1708:16)
              at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
            errno: -4078,
            code: 'ECONNREFUSED',
            syscall: 'connect',
            address: '::1',
            port: 6379
          },
          Error: connect ECONNREFUSED 127.0.0.1:6379
              at createConnectionError (node:net:1678:14)
              at afterConnectMultiple (node:net:1708:16)
              at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
            errno: -4078,
            code: 'ECONNREFUSED',
            syscall: 'connect',
            address: '127.0.0.1',
            port: 6379
          }
        ]
      }

      72 |     imports: [ConfigModule],
      73 |     useFactory: async (configService: ConfigService) => ({
    > 74 |       store: await redisStore({
         |                    ^
      75 |         socket: {
      76 |           host: configService.get<string>('redis.host'),
      77 |           port: configService.get<number>('redis.port'),

      at redisStore (../../node_modules/.pnpm/cache-manager-redis-yet@5.1.5/node_modules/cache-manager-redis-yet/dist/index.js:101:17)
      at InstanceWrapper.useFactory [as metatype] (../src/app.module.ts:74:16)
      at TestingInjector.instantiateClass (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:424:37)
      at callback (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:70:34)
      at TestingInjector.resolveConstructorParams (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:170:24)
      at TestingInjector.loadInstance (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:75:13)
      at TestingInjector.loadProvider (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:103:9)
      at ../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:56:13
      at async Promise.all (index 5)
      at TestingInstanceLoader.createInstancesOfProviders (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:55:9)
      at ../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:40:13
      at async Promise.all (index 6)
      at TestingInstanceLoader.createInstances (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:39:9)
      at TestingInstanceLoader.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:22:13)
      at TestingInstanceLoader.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-instance-loader.js:9:9)
      at TestingModuleBuilder.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-module.builder.js:118:9)
      at TestingModuleBuilder.compile (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-module.builder.js:74:9)
      at Object.<anonymous> (app.e2e-spec.ts:11:42)

  ● AppController (e2e) › / (GET)

    AggregateError:

  ● Cannot log after tests are done. Did you forget to wait for something async in your test?
    Attempted to log "AggregateError:
        at internalConnectMultiple (node:net:1134:18)
        at afterConnectMultiple (node:net:1715:7)
        at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
      code: 'ECONNREFUSED',
      [errors]: [
        Error: connect ECONNREFUSED ::1:6379
            at createConnectionError (node:net:1678:14)
            at afterConnectMultiple (node:net:1708:16)
            at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '::1',
          port: 6379
        },
        Error: connect ECONNREFUSED 127.0.0.1:6379
            at createConnectionError (node:net:1678:14)
            at afterConnectMultiple (node:net:1708:16)
            at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '127.0.0.1',
          port: 6379
        }
      ]
    }".

    at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
      code: 'ECONNREFUSED',
      [errors]: [
        Error: connect ECONNREFUSED ::1:6379
            at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '::1',
          port: 6379
        },
        Error: connect ECONNREFUSED 127.0.0.1:6379
            at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '127.0.0.1',
          port: 6379
        }
      ]
    }".
    at console.error (../node_modules/@jest/console/build/index.js:124:10)
    at Queue.emit (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue-base.ts:129:17)
    at Queue.emit (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue.ts:192:18)
    at RedisConnection.<anonymous> (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue-base.ts:75:56)
    at EventEmitter.RedisConnection.handleClientError (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/redis-connection.ts:121:12)
    at EventEmitter.silentEmit (../../node_modules/.pnpm/ioredis@5.8.2/node_modules/ioredis/built/Redis.js:484:30)
    at Socket.<anonymous> (../../node_modules/.pnpm/ioredis@5.8.2/node_modules/ioredis/built/redis/event_handler.js:221:14)

FAIL test/simple.e2e-spec.ts (7.616 s)
  ● Console

    console.error
      Redis Connection Error: AggregateError:
          at internalConnectMultiple (node:net:1134:18)
          at afterConnectMultiple (node:net:1715:7)
          at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
        code: 'ECONNREFUSED',
        [errors]: [
          Error: connect ECONNREFUSED ::1:6379
              at createConnectionError (node:net:1678:14)
              at afterConnectMultiple (node:net:1708:16)
              at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
            errno: -4078,
            code: 'ECONNREFUSED',
            syscall: 'connect',
            address: '::1',
            port: 6379
          },
          Error: connect ECONNREFUSED 127.0.0.1:6379
              at createConnectionError (node:net:1678:14)
              at afterConnectMultiple (node:net:1708:16)
              at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
            errno: -4078,
            code: 'ECONNREFUSED',
            syscall: 'connect',
            address: '127.0.0.1',
            port: 6379
          }
        ]
      }

      72 |     imports: [ConfigModule],
      73 |     useFactory: async (configService: ConfigService) => ({
    > 74 |       store: await redisStore({
         |                    ^
      75 |         socket: {
      76 |           host: configService.get<string>('redis.host'),
      77 |           port: configService.get<number>('redis.port'),

      at redisStore (../../node_modules/.pnpm/cache-manager-redis-yet@5.1.5/node_modules/cache-manager-redis-yet/dist/index.js:101:17)
      at InstanceWrapper.useFactory [as metatype] (../src/app.module.ts:74:16)
      at TestingInjector.instantiateClass (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:424:37)
      at callback (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:70:34)
      at TestingInjector.resolveConstructorParams (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:170:24)
      at TestingInjector.loadInstance (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:75:13)
      at TestingInjector.loadProvider (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:103:9)
      at ../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:56:13
      at async Promise.all (index 5)
      at TestingInstanceLoader.createInstancesOfProviders (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:55:9)
      at ../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:40:13
      at async Promise.all (index 6)
      at TestingInstanceLoader.createInstances (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:39:9)
      at TestingInstanceLoader.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:22:13)
      at TestingInstanceLoader.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-instance-loader.js:9:9)
      at TestingModuleBuilder.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-module.builder.js:118:9)
      at TestingModuleBuilder.compile (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-module.builder.js:74:9)
      at Object.<anonymous> (simple.e2e-spec.ts:9:42)

  ● Simple Test › should pass

    AggregateError:

  ● Cannot log after tests are done. Did you forget to wait for something async in your test?
    Attempted to log "AggregateError:
        at internalConnectMultiple (node:net:1134:18)
        at afterConnectMultiple (node:net:1715:7)
        at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
      code: 'ECONNREFUSED',
      [errors]: [
        Error: connect ECONNREFUSED ::1:6379
            at createConnectionError (node:net:1678:14)
            at afterConnectMultiple (node:net:1708:16) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '::1',
          port: 6379
        },
        Error: connect ECONNREFUSED 127.0.0.1:6379
            at createConnectionError (node:net:1678:14)
            at afterConnectMultiple (node:net:1708:16)
            at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '127.0.0.1',
          port: 6379
        }
      ]
    }".

    at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
      code: 'ECONNREFUSED',
      [errors]: [
        Error: connect ECONNREFUSED ::1:6379
            at afterConnectMultiple (../node:net:1708:16) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '::1',
          port: 6379
        },
        Error: connect ECONNREFUSED 127.0.0.1:6379
            at TCPConnectWrap.callbackTrampoline (../node:internal/async_hooks:130:17) {
          errno: -4078,
          code: 'ECONNREFUSED',
          syscall: 'connect',
          address: '127.0.0.1',
          port: 6379
        }
      ]
    }".
    at console.error (../node_modules/@jest/console/build/index.js:124:10)
    at Queue.emit (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue-base.ts:129:17)
    at Queue.emit (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue.ts:192:18)
    at RedisConnection.<anonymous> (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/queue-base.ts:75:56)
    at EventEmitter.RedisConnection.handleClientError (../../node_modules/.pnpm/bullmq@5.65.0/node_modules/bullmq/src/classes/redis-connection.ts:121:12)
    at EventEmitter.silentEmit (../../node_modules/.pnpm/ioredis@5.8.2/node_modules/ioredis/built/Redis.js:484:30)
    at Socket.<anonymous> (../../node_modules/.pnpm/ioredis@5.8.2/node_modules/ioredis/built/redis/event_handler.js:221:14)

FAIL test/phase3-workflow.e2e-spec.ts (7.637 s)
  ● Console

    console.error
      Redis Connection Error: AggregateError:
          at internalConnectMultiple (node:net:1134:18)
          at afterConnectMultiple (node:net:1715:7) {
        code: 'ECONNREFUSED',
        [errors]: [
          Error: connect ECONNREFUSED ::1:6379
              at createConnectionError (node:net:1678:14)
              at afterConnectMultiple (node:net:1708:16) {
            errno: -4078,
            code: 'ECONNREFUSED',
            syscall: 'connect',
            address: '::1',
            port: 6379
          },
          Error: connect ECONNREFUSED 127.0.0.1:6379
              at createConnectionError (node:net:1678:14)
              at afterConnectMultiple (node:net:1708:16) {
            errno: -4078,
            code: 'ECONNREFUSED',
            syscall: 'connect',
            address: '127.0.0.1',
            port: 6379
          }
        ]
      }

      72 |     imports: [ConfigModule],
      73 |     useFactory: async (configService: ConfigService) => ({
    > 74 |       store: await redisStore({
         |                    ^
      75 |         socket: {
      76 |           host: configService.get<string>('redis.host'),
      77 |           port: configService.get<number>('redis.port'),

      at redisStore (../../node_modules/.pnpm/cache-manager-redis-yet@5.1.5/node_modules/cache-manager-redis-yet/dist/index.js:101:17)
      at InstanceWrapper.useFactory [as metatype] (../src/app.module.ts:74:16)
      at TestingInjector.instantiateClass (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:424:37)
      at callback (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:70:34)
      at TestingInjector.resolveConstructorParams (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:170:24)
      at TestingInjector.loadInstance (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:75:13)
      at TestingInjector.loadProvider (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/injector.js:103:9)
      at ../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:56:13
      at async Promise.all (index 5)
      at TestingInstanceLoader.createInstancesOfProviders (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:55:9)
      at ../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:40:13
      at async Promise.all (index 6)
      at TestingInstanceLoader.createInstances (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:39:9)
      at TestingInstanceLoader.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+core@11.1.9_@nestjs_89e063bd3a6d5071b082cab065bf34d7/node_modules/@nestjs/core/injector/instance-loader.js:22:13)
      at TestingInstanceLoader.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-instance-loader.js:9:9)
      at TestingModuleBuilder.createInstancesOfDependencies (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-module.builder.js:118:9)
      at TestingModuleBuilder.compile (../../node_modules/.pnpm/@nestjs+testing@11.1.9_@nes_5fa0f54bf7d8c8acec998f5e81836857/node_modules/@nestjs/testing/testing-module.builder.js:74:9)
      at Object.<anonymous> (phase3-workflow.e2e-spec.ts:25:42)

  ● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

    AggregateError:

  ● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit Workflow

    AggregateError:

  ● Phase 3 Workflow (E2E) › /correspondences/:id/workflow/action (POST) - Approve Step

    AggregateError:

  ● Test suite failed to run

    TypeError: Cannot read properties of undefined (reading 'close')

      70 |       // Correspondence cleanup might be needed if not using a test DB
      71 |     }
    > 72 |     await app.close();
         |               ^
      73 |   });
      74 |
      75 |   it('/correspondences (POST) - Create Document', async () => {

      at Object.<anonymous> (phase3-workflow.e2e-spec.ts:72:15)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 3 failed, 3 total
Tests:       5 failed, 5 total
Snapshots:   0 total
Time:        8.87 s
Ran all test suites.
109  backend/e2e-output10.txt  Normal file
@@ -0,0 +1,109 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 5332 - 12/09/2025, 11:25:20 AM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 5332 - 12/09/2025, 11:25:21 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0003-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
3,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0003-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 3, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0003-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 3, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
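Every run in these logs trips over the same audit insert: the entity maps a `generated_at` column that the `document_number_audit` table evidently lacks (MySQL error 1054; the `RETURNING` clause also implies a MariaDB-compatible server, since stock MySQL has no `INSERT ... RETURNING`). A minimal TypeORM migration sketch that would add the missing column, assuming the table name shown in the query above; the class name, timestamp, column type, and default are assumptions to verify against the project's schema files:

```typescript
import { MigrationInterface, QueryRunner } from 'typeorm';

// Hypothetical migration: adds the generated_at column the audit entity expects.
// Column type and default are assumptions; check the authoritative schema first.
export class AddGeneratedAtToDocumentNumberAudit1733740000000 implements MigrationInterface {
  public async up(queryRunner: QueryRunner): Promise<void> {
    // A default keeps the existing INSERT ... VALUES (..., DEFAULT) working.
    await queryRunner.query(
      'ALTER TABLE `document_number_audit` ' +
        'ADD COLUMN `generated_at` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP',
    );
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      'ALTER TABLE `document_number_audit` DROP COLUMN `generated_at`',
    );
  }
}
```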
[Nest] 5332 - 12/09/2025, 11:25:21 AM ERROR [WorkflowEngineService] Transition Failed for c4765f7d-fb12-4ca8-9fa7-10a237069581: Cannot read properties of undefined (reading 'terminal')
[Nest] 5332 - 12/09/2025, 11:25:21 AM ERROR [CorrespondenceWorkflowService] Failed to submit workflow: TypeError: Cannot read properties of undefined (reading 'terminal')
[Nest] 5332 - 12/09/2025, 11:25:21 AM ERROR [ExceptionsHandler] TypeError: Cannot read properties of undefined (reading 'terminal')
at WorkflowEngineService.processTransition (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-engine.service.ts:274:36)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at CorrespondenceWorkflowService.submitWorkflow (D:\nap-dms.lcbp3\backend\src\modules\correspondence\correspondence-workflow.service.ts:73:32)
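The TypeError above suggests `processTransition` dereferences a state or transition lookup that came back `undefined` (something like `states[current].terminal`). A minimal defensive sketch; the interface shape and function name are hypothetical, and the real structure at workflow-engine.service.ts:274 may differ, but the guard pattern is the same:

```typescript
// Hypothetical shapes for illustration only.
interface StateDef {
  terminal: boolean;
  transitions: Record<string, string>;
}

function resolveNextState(
  states: Record<string, StateDef>,
  current: string,
  action: string,
): string {
  const state = states[current];
  if (!state) {
    // Fail fast with a descriptive error instead of the bare
    // "Cannot read properties of undefined (reading 'terminal')".
    throw new Error(`Unknown workflow state '${current}'`);
  }
  const next = state.transitions[action];
  if (!next) {
    throw new Error(`Action '${action}' is not allowed from state '${current}'`);
  }
  return next;
}
```

Surfacing a named error here would also turn the 500 "Internal Server Error" seen below into something the e2e spec can assert on.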
FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 5

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 500 "Internal Server Error"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 1 failed, 4 passed, 5 total
Snapshots: 0 total
Time: 5.321 s
Ran all test suites.
ELIFECYCLE  Command failed with exit code 1.
100
backend/e2e-output11.txt
Normal file
@@ -0,0 +1,100 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 16184 - 12/09/2025, 11:27:54 AM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 16184 - 12/09/2025, 11:27:54 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0004-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
4,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0004-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 4, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0004-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 4, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 6

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.log
Workflow Instance ID: 3577a2e1-bada-4fe7-84f1-876ec83b0624

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:99:13)

console.log
Current State: IN_REVIEW

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:100:13)

● Phase 3 Workflow (E2E) › /correspondences/:id/workflow/action (POST) - Process Action

expected 201 "Created", got 403 "Forbidden"

116 | comment: 'E2E Approved via Unified Workflow Engine',
117 | })
> 118 | .expect(201);
| ^
119 |
120 | expect(response.body).toHaveProperty('success', true);
121 | expect(response.body).toHaveProperty('nextState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:118:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 1 failed, 4 passed, 5 total
Snapshots: 0 total
Time: 5.67 s
Ran all test suites.
ELIFECYCLE  Command failed with exit code 1.
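Runs 11 through 13 get past submit (the workflow instance is created and lands in IN_REVIEW) but fail only on the action step with 403 Forbidden, which points at the authenticated test user lacking whatever permission guards the workflow-action endpoint. A hedged sketch of acting as a user who actually holds the approver role for the current step; the login route, credential values, and response field are assumptions, not the project's confirmed API:

```typescript
import * as request from 'supertest';
import { INestApplication } from '@nestjs/common';

// Hypothetical helper: /auth/login, the credentials, and the accessToken
// field are all assumptions to be checked against the real auth module.
async function loginAs(
  app: INestApplication,
  username: string,
  password: string,
): Promise<string> {
  const res = await request(app.getHttpServer())
    .post('/auth/login')
    .send({ username, password })
    .expect(201);
  return res.body.accessToken;
}

// Sketch of the action step with a token that carries the reviewer permission:
//
//   const reviewerToken = await loginAs(app, 'reviewer1', 'reviewer-password');
//   await request(app.getHttpServer())
//     .post(`/correspondences/${correspondenceId}/workflow/action`)
//     .set('Authorization', `Bearer ${reviewerToken}`)
//     .send({ action: 'APPROVE', comment: 'E2E Approved via Unified Workflow Engine' })
//     .expect(201);
```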
100
backend/e2e-output12.txt
Normal file
@@ -0,0 +1,100 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 7212 - 12/09/2025, 11:32:17 AM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 7212 - 12/09/2025, 11:32:17 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0005-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
5,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0005-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 5, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0005-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 5, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 7

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.log
Workflow Instance ID: 20c439a2-841c-40a1-96e7-5c9f8dfe234f

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:99:13)

console.log
Current State: IN_REVIEW

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:100:13)

● Phase 3 Workflow (E2E) › /correspondences/:id/workflow/action (POST) - Process Action

expected 201 "Created", got 403 "Forbidden"

116 | comment: 'E2E Approved via Unified Workflow Engine',
117 | })
> 118 | .expect(201);
| ^
119 |
120 | expect(response.body).toHaveProperty('success', true);
121 | expect(response.body).toHaveProperty('nextState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:118:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 1 failed, 4 passed, 5 total
Snapshots: 0 total
Time: 5.533 s
Ran all test suites.
ELIFECYCLE  Command failed with exit code 1.
100
backend/e2e-output13.txt
Normal file
@@ -0,0 +1,100 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 46180 - 12/09/2025, 11:40:20 AM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 46180 - 12/09/2025, 11:40:20 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0006-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
6,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0006-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 6, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0006-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 6, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 8

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.log
Workflow Instance ID: 9fc9ddd7-5257-4363-b1f1-f9c22f581b44

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:99:13)

console.log
Current State: IN_REVIEW

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:100:13)

● Phase 3 Workflow (E2E) › /correspondences/:id/workflow/action (POST) - Process Action

expected 201 "Created", got 403 "Forbidden"

116 | comment: 'E2E Approved via Unified Workflow Engine',
117 | })
> 118 | .expect(201);
| ^
119 |
120 | expect(response.body).toHaveProperty('success', true);
121 | expect(response.body).toHaveProperty('nextState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:118:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 1 failed, 4 passed, 5 total
Snapshots: 0 total
Time: 5.568 s
Ran all test suites.
ELIFECYCLE  Command failed with exit code 1.
84
backend/e2e-output14.txt
Normal file
84
backend/e2e-output14.txt
Normal file
@@ -0,0 +1,84 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 38304 - 12/09/2025, 12:13:26 PM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 38304 - 12/09/2025, 12:13:26 PM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0007-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
7,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0007-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 7, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0007-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 7, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
PASS test/phase3-workflow.e2e-spec.ts (5.236 s)
● Console

console.log
Created Correspondence ID: 9

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.log
Workflow Instance ID: d601ef06-93e0-435c-ad76-fc6e3dee5c22

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:99:13)

console.log
Current State: IN_REVIEW

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:100:13)

console.log
Action Result: { success: true, nextState: 'APPROVED', events: [], isCompleted: true }

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:122:13)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.

Test Suites: 3 passed, 3 total
Tests: 5 passed, 5 total
Snapshots: 0 total
Time: 6.691 s
Ran all test suites.
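Even this fully green run still ends with the "worker process has failed to exit gracefully" warning, i.e. the Nest app holds something open after the suite (typically the database pool or a timer). A minimal teardown sketch in the shape of these specs, assuming nothing beyond what the logs show:

```typescript
import { Test, TestingModule } from '@nestjs/testing';
import { INestApplication } from '@nestjs/common';
import { AppModule } from '../src/app.module';

describe('Teardown sketch', () => {
  let app: INestApplication;

  beforeAll(async () => {
    const moduleFixture: TestingModule = await Test.createTestingModule({
      imports: [AppModule],
    }).compile();
    app = moduleFixture.createNestApplication();
    await app.init();
  });

  // Closing the app disposes the TypeORM pool and other providers, which is
  // exactly what the "failed to exit gracefully" warning is asking for.
  afterAll(async () => {
    await app.close();
  });

  it('boots', () => {
    expect(app).toBeDefined();
  });
});
```

If the warning persists after that, running Jest with `--detectOpenHandles` (as the message itself suggests) names the leaked handle, and any long-lived `setInterval` inside the app should get `.unref()` so it cannot keep the worker alive.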
84
backend/e2e-output15.txt
Normal file
84
backend/e2e-output15.txt
Normal file
@@ -0,0 +1,84 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 38760 - 12/09/2025, 12:16:40 PM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 38760 - 12/09/2025, 12:16:40 PM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0008-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
8,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0008-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 8, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0008-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 8, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
PASS test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 10

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.log
Workflow Instance ID: 5057da48-f0e5-4d1a-86f1-a1b96929a6eb

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:99:13)

console.log
Current State: IN_REVIEW

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:100:13)

console.log
Action Result: { success: true, nextState: 'APPROVED', events: [], isCompleted: true }

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:122:13)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.

Test Suites: 3 passed, 3 total
Tests: 5 passed, 5 total
Snapshots: 0 total
Time: 5.885 s, estimated 6 s
Ran all test suites.
63
backend/e2e-output2.txt
Normal file
63
backend/e2e-output2.txt
Normal file
@@ -0,0 +1,63 @@

> backend@1.5.1 test:e2e
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts (7.275 s)
PASS test/app.e2e-spec.ts (7.566 s)
FAIL test/phase3-workflow.e2e-spec.ts (7.639 s)
● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

QueryFailedError: Table 'lcbp3_dev.correspondence_routing_templates' doesn't exist

at Query.onResult (../../node_modules/.pnpm/typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08/src/driver/mysql/MysqlQueryRunner.ts:248:33)
at Query.execute (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/commands/command.js:36:14)
at PoolConnection.handlePacket (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:477:34)
at PacketParser.onPacket (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:93:12)
at PacketParser.executeStart (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/packet_parser.js:75:16)
at Socket.<anonymous> (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:100:25)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit Workflow

QueryFailedError: Table 'lcbp3_dev.correspondence_routing_templates' doesn't exist

at Query.onResult (../../node_modules/.pnpm/typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08/src/driver/mysql/MysqlQueryRunner.ts:248:33)
at Query.execute (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/commands/command.js:36:14)
at PoolConnection.handlePacket (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:477:34)
at PacketParser.onPacket (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:93:12)
at PacketParser.executeStart (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/packet_parser.js:75:16)
at Socket.<anonymous> (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:100:25)

● Phase 3 Workflow (E2E) › /correspondences/:id/workflow/action (POST) - Approve Step

QueryFailedError: Table 'lcbp3_dev.correspondence_routing_templates' doesn't exist

at Query.onResult (../../node_modules/.pnpm/typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08/src/driver/mysql/MysqlQueryRunner.ts:248:33)
at Query.execute (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/commands/command.js:36:14)
at PoolConnection.handlePacket (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:477:34)
at PacketParser.onPacket (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:93:12)
at PacketParser.executeStart (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/packet_parser.js:75:16)
at Socket.<anonymous> (../../node_modules/.pnpm/mysql2@3.15.3/node_modules/mysql2/lib/base/connection.js:100:25)


● Test suite failed to run

TypeORMError: Empty criteria(s) are not allowed for the delete method.

67 | if (dataSource) {
68 | const templateRepo = dataSource.getRepository(RoutingTemplate);
> 69 | await templateRepo.delete(templateId);
| ^
70 | // Correspondence cleanup might be needed if not using a test DB
71 | }
72 | await app.close();

at EntityManager.delete (../../node_modules/.pnpm/typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08/src/entity-manager/EntityManager.ts:849:17)
at Repository.delete (../../node_modules/.pnpm/typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08/src/repository/Repository.ts:420:35)
at Object.<anonymous> (phase3-workflow.e2e-spec.ts:69:32)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 3 failed, 2 passed, 5 total
Snapshots: 0 total
Time: 9.08 s
Ran all test suites.
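The teardown at phase3-workflow.e2e-spec.ts:69 calls `templateRepo.delete(templateId)` while `templateId` is still undefined (the suite died before creating one), and TypeORM rejects empty delete criteria by design, to prevent accidentally wiping the table. A sketch of the same `afterAll` that only deletes what was actually created; the variable names follow the code frame above:

```typescript
afterAll(async () => {
  // Only delete when a template was actually created; an undefined id would
  // otherwise trigger "Empty criteria(s) are not allowed for the delete method".
  if (dataSource && templateId !== undefined) {
    const templateRepo = dataSource.getRepository(RoutingTemplate);
    await templateRepo.delete({ id: templateId });
    // Correspondence cleanup might be needed if not using a test DB
  }
  await app.close();
});
```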
165
backend/e2e-output3.txt
Normal file
165
backend/e2e-output3.txt
Normal file
@@ -0,0 +1,165 @@

> backend@1.5.1 test:e2e
> jest --config ./test/jest-e2e.json

[Nest] 28712 - 12/09/2025, 9:48:43 AM ERROR [TypeOrmModule] Unable to connect to the database. Retrying (1)...
TypeORMError: Entity metadata for RoutingTemplate#steps was not found. Check if you specified a correct entity object and if it's connected in the connection options.
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1128:23
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.computeInverseProperties (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1118:34)
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:160:18
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.build (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:159:25)
at ConnectionMetadataBuilder.buildEntityMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\connection\ConnectionMetadataBuilder.ts:106:11)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at DataSource.buildMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:733:13)
at DataSource.initialize (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:264:13)
[Nest] 40512 - 12/09/2025, 9:48:43 AM ERROR [TypeOrmModule] Unable to connect to the database. Retrying (1)...
TypeORMError: Entity metadata for RoutingTemplate#steps was not found. Check if you specified a correct entity object and if it's connected in the connection options.
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1128:23
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.computeInverseProperties (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1118:34)
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:160:18
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.build (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:159:25)
at ConnectionMetadataBuilder.buildEntityMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\connection\ConnectionMetadataBuilder.ts:106:11)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at DataSource.buildMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:733:13)
at DataSource.initialize (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:264:13)
[Nest] 41884 - 12/09/2025, 9:48:43 AM ERROR [TypeOrmModule] Unable to connect to the database. Retrying (1)...
TypeORMError: Entity metadata for RoutingTemplate#steps was not found. Check if you specified a correct entity object and if it's connected in the connection options.
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1128:23
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.computeInverseProperties (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1118:34)
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:160:18
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.build (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:159:25)
at ConnectionMetadataBuilder.buildEntityMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\connection\ConnectionMetadataBuilder.ts:106:11)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at DataSource.buildMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:733:13)
at DataSource.initialize (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:264:13)
[Nest] 41884 - 12/09/2025, 9:48:46 AM ERROR [TypeOrmModule] Unable to connect to the database. Retrying (2)...
TypeORMError: Entity metadata for RoutingTemplate#steps was not found. Check if you specified a correct entity object and if it's connected in the connection options.
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1128:23
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.computeInverseProperties (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1118:34)
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:160:18
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.build (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:159:25)
at ConnectionMetadataBuilder.buildEntityMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\connection\ConnectionMetadataBuilder.ts:106:11)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at DataSource.buildMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:733:13)
at DataSource.initialize (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:264:13)
[Nest] 28712 - 12/09/2025, 9:48:46 AM ERROR [TypeOrmModule] Unable to connect to the database. Retrying (2)...
TypeORMError: Entity metadata for RoutingTemplate#steps was not found. Check if you specified a correct entity object and if it's connected in the connection options.
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1128:23
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.computeInverseProperties (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1118:34)
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:160:18
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.build (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:159:25)
at ConnectionMetadataBuilder.buildEntityMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\connection\ConnectionMetadataBuilder.ts:106:11)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at DataSource.buildMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:733:13)
at DataSource.initialize (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:264:13)
[Nest] 40512 - 12/09/2025, 9:48:46 AM ERROR [TypeOrmModule] Unable to connect to the database. Retrying (2)...
TypeORMError: Entity metadata for RoutingTemplate#steps was not found. Check if you specified a correct entity object and if it's connected in the connection options.
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1128:23
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.computeInverseProperties (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:1118:34)
at D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:160:18
at Array.forEach (<anonymous>)
at EntityMetadataBuilder.build (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\metadata-builder\EntityMetadataBuilder.ts:159:25)
at ConnectionMetadataBuilder.buildEntityMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\connection\ConnectionMetadataBuilder.ts:106:11)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at DataSource.buildMetadatas (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:733:13)
at DataSource.initialize (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\data-source\DataSource.ts:264:13)
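"Entity metadata for RoutingTemplate#steps was not found" is TypeORM's way of saying the entity on the other side of the `steps` relation was never registered with the DataSource, so metadata building (and thus every connection attempt) fails. A hedged registration sketch; `RoutingTemplateStep` and the import paths are guesses from the relation name, not confirmed class names:

```typescript
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { RoutingTemplate } from './entities/routing-template.entity';
// Hypothetical entity behind the `steps` relation; the real name may differ.
import { RoutingTemplateStep } from './entities/routing-template-step.entity';

@Module({
  imports: [
    // Both sides of the RoutingTemplate#steps relation must be known to the
    // connection, either here or in the root TypeOrmModule.forRoot entities list.
    TypeOrmModule.forFeature([RoutingTemplate, RoutingTemplateStep]),
  ],
})
export class CorrespondenceModule {}
```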
FAIL test/app.e2e-spec.ts (8.781 s)
● AppController (e2e) › / (GET)

thrown: "Exceeded timeout of 5000 ms for a hook.
Add a timeout value to this test to increase the timeout, if this is a long-running test. See https://jestjs.io/docs/api#testname-fn-timeout."

8 | let app: INestApplication<App>;
9 |
> 10 | beforeEach(async () => {
| ^
11 | const moduleFixture: TestingModule = await Test.createTestingModule({
12 | imports: [AppModule],
13 | }).compile();

at app.e2e-spec.ts:10:3
at Object.<anonymous> (app.e2e-spec.ts:7:1)

FAIL test/phase3-workflow.e2e-spec.ts (8.787 s)
● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

thrown: "Exceeded timeout of 5000 ms for a hook.
Add a timeout value to this test to increase the timeout, if this is a long-running test. See https://jestjs.io/docs/api#testname-fn-timeout."

27 | let adminToken: string;
28 |
> 29 | beforeAll(async () => {
| ^
30 | const moduleFixture: TestingModule = await Test.createTestingModule({
31 | imports: [AppModule],
32 | }).compile();

at phase3-workflow.e2e-spec.ts:29:3
at Object.<anonymous> (phase3-workflow.e2e-spec.ts:15:1)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

thrown: "Exceeded timeout of 5000 ms for a hook.
Add a timeout value to this test to increase the timeout, if this is a long-running test. See https://jestjs.io/docs/api#testname-fn-timeout."

27 | let adminToken: string;
28 |
> 29 | beforeAll(async () => {
| ^
30 | const moduleFixture: TestingModule = await Test.createTestingModule({
31 | imports: [AppModule],
32 | }).compile();

at phase3-workflow.e2e-spec.ts:29:3
at Object.<anonymous> (phase3-workflow.e2e-spec.ts:15:1)

● Phase 3 Workflow (E2E) › /correspondences/:id/workflow/action (POST) - Process Action

thrown: "Exceeded timeout of 5000 ms for a hook.
Add a timeout value to this test to increase the timeout, if this is a long-running test. See https://jestjs.io/docs/api#testname-fn-timeout."

27 | let adminToken: string;
28 |
> 29 | beforeAll(async () => {
| ^
30 | const moduleFixture: TestingModule = await Test.createTestingModule({
31 | imports: [AppModule],
32 | }).compile();

at phase3-workflow.e2e-spec.ts:29:3
at Object.<anonymous> (phase3-workflow.e2e-spec.ts:15:1)

FAIL test/simple.e2e-spec.ts (8.797 s)
● Simple Test › should pass

thrown: "Exceeded timeout of 5000 ms for a test.
Add a timeout value to this test to increase the timeout, if this is a long-running test. See https://jestjs.io/docs/api#testname-fn-timeout."

6 |
7 | describe('Simple Test', () => {
> 8 | it('should pass', async () => {
| ^
9 | const moduleFixture: TestingModule = await Test.createTestingModule({
10 | imports: [AppModule],
11 | }).compile();

at simple.e2e-spec.ts:8:3
at Object.<anonymous> (simple.e2e-spec.ts:7:1)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 3 failed, 3 total
Tests: 5 failed, 5 total
Snapshots: 0 total
Time: 9.98 s
Ran all test suites.
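Once the TypeORM retries above eat the whole startup budget, every hook that compiles AppModule blows Jest's default 5000 ms limit. The per-hook timeout override that the error message itself recommends, sketched on the `beforeAll` shown in the code frames (30 s is an arbitrary pick):

```typescript
beforeAll(async () => {
  const moduleFixture: TestingModule = await Test.createTestingModule({
    imports: [AppModule],
  }).compile();

  app = moduleFixture.createNestApplication();
  await app.init();
}, 30_000); // third argument = per-hook timeout in ms
```

Setting `"testTimeout": 30000` in jest-e2e.json would raise the limit suite-wide instead, though the real fix is the entity registration above, since a boot that takes 5 s only because it is retrying a broken connection should not pass at any timeout.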
83
backend/e2e-output4.txt
Normal file
83
backend/e2e-output4.txt
Normal file
@@ -0,0 +1,83 @@

> backend@1.5.1 test:e2e
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.warn
WorkflowDefinition CORRESPONDENCE_FLOW_V1 not found. Tests may fail.

55 |
56 | if (!existing) {
> 57 | console.warn(
| ^
58 | 'WorkflowDefinition CORRESPONDENCE_FLOW_V1 not found. Tests may fail.'
59 | );
60 | }

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:57:15)

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

expected 201 "Created", got 403 "Forbidden"

77 | details: { question: 'Testing Unified Workflow' },
78 | })
> 79 | .expect(201);
| ^
80 |
81 | expect(response.body).toHaveProperty('id');
82 | expect(response.body).toHaveProperty('correspondenceNumber');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:79:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 403 "Forbidden"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 2 failed, 3 passed, 5 total
Snapshots: 0 total
Time: 5.219 s, estimated 9 s
Ran all test suites.
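Both failures in this run are 403 Forbidden: the requests reach the server but the test client is not authorized. A hedged sketch of one common cause and fix, assuming JWT bearer auth; the /auth/login endpoint and the e2e-admin credentials are illustrative assumptions, since the actual guard and seed data are not visible in this diff:

// Sketch only — authenticate once in beforeAll and send the token with every
// request, so the permission guard sees a known user instead of rejecting with 403.
import { INestApplication } from '@nestjs/common';
import * as request from 'supertest';

let app: INestApplication; // initialized in the suite's bootstrap (not shown here)
let accessToken: string;

beforeAll(async () => {
  const login = await request(app.getHttpServer())
    .post('/auth/login') // hypothetical endpoint
    .send({ username: 'e2e-admin', password: 'secret' }); // hypothetical seed user
  accessToken = login.body.accessToken;
});

it('/correspondences (POST) - Create Document', async () => {
  await request(app.getHttpServer())
    .post('/correspondences')
    .set('Authorization', `Bearer ${accessToken}`) // without this header the guard returns 403
    .send({ details: { question: 'Testing Unified Workflow' } })
    .expect(201);
});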
214
backend/e2e-output5.txt
Normal file
@@ -0,0 +1,214 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 45012 - 12/09/2025, 10:04:29 AM ERROR [DocumentNumberingService] Error generating number for doc_num:1:1:0:2025
[Nest] 45012 - 12/09/2025, 10:04:29 AM ERROR [DocumentNumberingService] QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `correspondence_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (?, ?, ?, ?, ?, ?, 1) RETURNING `discipline_id`, `last_number`, `version`',
parameters: [
1,
41,
1,
0,
2025,
1
],
driverError: Error: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `correspondence_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, 1, 0, 2025, 1, 1) RETURNING `discipline_id`, `last_number`, `version`'
},
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `correspondence_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, 1, 0, 2025, 1, 1) RETURNING `discipline_id`, `last_number`, `version`'
}
[Nest] 45012 - 12/09/2025, 10:04:29 AM ERROR [DocumentNumberingService] Failed to log error
[Nest] 45012 - 12/09/2025, 10:04:29 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'error_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, DEFAULT) RETURNING `id`, `error_at`',
parameters: [
'doc_num:1:1:0:2025',
'DB_ERROR',
'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\n at Query.onResult (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\src\\driver\\mysql\\MysqlQueryRunner.ts:248:33)\n at Query.execute (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\commands\\command.js:36:14)\n at PoolConnection.handlePacket (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:477:34)\n at PacketParser.onPacket (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:93:12)\n at PacketParser.executeStart (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\packet_parser.js:75:16)\n at Socket.<anonymous> (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:100:25)\n at Socket.emit (node:events:519:28)\n at addChunk (node:internal/streams/readable:561:12)\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\n at Socket.Readable.push (node:internal/streams/readable:392:5)\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)',
'{"projectId":1,"originatorId":41,"typeId":1,"year":2025,"customTokens":{"TYPE_CODE":"RFA","ORG_CODE":"ORG"}}'
],
driverError: Error: Unknown column 'error_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'error_at' in 'RETURNING'",
sql: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, \'doc_num:1:1:0:2025\', \'DB_ERROR\', \'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\', \'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\\n at Query.onResult (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\\\src\\\\driver\\\\mysql\\\\MysqlQueryRunner.ts:248:33)\\n at Query.execute (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\commands\\\\command.js:36:14)\\n at PoolConnection.handlePacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:477:34)\\n at PacketParser.onPacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:93:12)\\n at PacketParser.executeStart (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\packet_parser.js:75:16)\\n at Socket.<anonymous> (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:100:25)\\n at Socket.emit (node:events:519:28)\\n at addChunk (node:internal/streams/readable:561:12)\\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\\n at Socket.Readable.push (node:internal/streams/readable:392:5)\\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)\', DEFAULT, DEFAULT, \'{\\"projectId\\":1,\\"originatorId\\":41,\\"typeId\\":1,\\"year\\":2025,\\"customTokens\\":{\\"TYPE_CODE\\":\\"RFA\\",\\"ORG_CODE\\":\\"ORG\\"}}\', DEFAULT) RETURNING `id`, `error_at`'
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'error_at' in 'RETURNING'",
sql: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, \'doc_num:1:1:0:2025\', \'DB_ERROR\', \'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\', \'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\\n at Query.onResult (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\\\src\\\\driver\\\\mysql\\\\MysqlQueryRunner.ts:248:33)\\n at Query.execute (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\commands\\\\command.js:36:14)\\n at PoolConnection.handlePacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:477:34)\\n at PacketParser.onPacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:93:12)\\n at PacketParser.executeStart (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\packet_parser.js:75:16)\\n at Socket.<anonymous> (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:100:25)\\n at Socket.emit (node:events:519:28)\\n at addChunk (node:internal/streams/readable:561:12)\\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\\n at Socket.Readable.push (node:internal/streams/readable:392:5)\\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)\', DEFAULT, DEFAULT, \'{\\"projectId\\":1,\\"originatorId\\":41,\\"typeId\\":1,\\"year\\":2025,\\"customTokens\\":{\\"TYPE_CODE\\":\\"RFA\\",\\"ORG_CODE\\":\\"ORG\\"}}\', DEFAULT) RETURNING `id`, `error_at`'
}
[Nest] 45012 - 12/09/2025, 10:04:29 AM ERROR [CorrespondenceService] Failed to create correspondence: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
[Nest] 45012 - 12/09/2025, 10:04:29 AM ERROR [ExceptionsHandler] QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `correspondence_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (?, ?, ?, ?, ?, ?, 1) RETURNING `discipline_id`, `last_number`, `version`',
parameters: [
1,
41,
1,
0,
2025,
1
],
driverError: Error: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `correspondence_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, 1, 0, 2025, 1, 1) RETURNING `discipline_id`, `last_number`, `version`'
},
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `correspondence_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, 1, 0, 2025, 1, 1) RETURNING `discipline_id`, `last_number`, `version`'
}
FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

expected 201 "Created", got 500 "Internal Server Error"

77 | details: { question: 'Testing Unified Workflow' },
78 | })
> 79 | .expect(201);
| ^
80 |
81 | expect(response.body).toHaveProperty('id');
82 | expect(response.body).toHaveProperty('correspondenceNumber');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:79:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 400 "Bad Request"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 2 failed, 3 passed, 5 total
Snapshots: 0 total
Time: 5.122 s
Ran all test suites.
ELIFECYCLE Command failed with exit code 1.
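Two distinct database problems are visible in this run. First, the FK `document_number_counters_ibfk_3` on `recipient_organization_id` fails even though the INSERT omits that column, so the column's database default cannot reference any `organizations` row. Second, `Unknown column 'error_at' in 'RETURNING'` means the TypeORM entity declares a column the real `document_number_errors` table lacks, so even the error logger fails. A hedged sketch of the entity-side alignment; property names are assumptions, column names come from the query in the log, and whether to rename the mapping or add the column by migration depends on the schema in specs/07-database:

// Sketch only — TypeORM puts generated columns into the INSERT's RETURNING list,
// so a column declared here but missing from the actual table fails with
// ER_BAD_FIELD_ERROR (errno 1054), exactly as the log shows.
import { Column, CreateDateColumn, Entity, PrimaryGeneratedColumn } from 'typeorm';

@Entity('document_number_errors')
export class DocumentNumberError {
  @PrimaryGeneratedColumn()
  id: number;

  @Column({ name: 'counter_key' })
  counterKey: string;

  @Column({ name: 'error_type' })
  errorType: string;

  // This mapping must name a timestamp column that actually exists in the
  // table — declaring `error_at` against a table without it is what breaks
  // the fallback error logging above.
  @CreateDateColumn({ name: 'error_at' })
  errorAt: Date;
}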
220
backend/e2e-output6.txt
Normal file
@@ -0,0 +1,220 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts (7.012 s)
PASS test/app.e2e-spec.ts (7.175 s)
[Nest] 22264 - 12/09/2025, 10:27:45 AM ERROR [DocumentNumberingService] Error generating number for doc_num:1:1:0:2025
[Nest] 22264 - 12/09/2025, 10:27:45 AM ERROR [DocumentNumberingService] QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`',
parameters: [
1,
41,
-1,
1,
0,
0,
0,
2025,
1
],
driverError: Error: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
},
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
}
[Nest] 22264 - 12/09/2025, 10:27:45 AM ERROR [DocumentNumberingService] Failed to log error
[Nest] 22264 - 12/09/2025, 10:27:45 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'error_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, DEFAULT) RETURNING `id`, `error_at`',
parameters: [
'doc_num:1:1:0:2025',
'DB_ERROR',
'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\n at Query.onResult (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\src\\driver\\mysql\\MysqlQueryRunner.ts:248:33)\n at Query.execute (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\commands\\command.js:36:14)\n at PoolConnection.handlePacket (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:477:34)\n at PacketParser.onPacket (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:93:12)\n at PacketParser.executeStart (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\packet_parser.js:75:16)\n at Socket.<anonymous> (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:100:25)\n at Socket.emit (node:events:519:28)\n at addChunk (node:internal/streams/readable:561:12)\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\n at Socket.Readable.push (node:internal/streams/readable:392:5)\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)',
'{"projectId":1,"originatorId":41,"typeId":1,"year":2025,"customTokens":{"TYPE_CODE":"RFA","ORG_CODE":"ORG"}}'
],
driverError: Error: Unknown column 'error_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'error_at' in 'RETURNING'",
sql: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, \'doc_num:1:1:0:2025\', \'DB_ERROR\', \'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\', \'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\\n at Query.onResult (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\\\src\\\\driver\\\\mysql\\\\MysqlQueryRunner.ts:248:33)\\n at Query.execute (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\commands\\\\command.js:36:14)\\n at PoolConnection.handlePacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:477:34)\\n at PacketParser.onPacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:93:12)\\n at PacketParser.executeStart (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\packet_parser.js:75:16)\\n at Socket.<anonymous> (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:100:25)\\n at Socket.emit (node:events:519:28)\\n at addChunk (node:internal/streams/readable:561:12)\\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\\n at Socket.Readable.push (node:internal/streams/readable:392:5)\\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)\', DEFAULT, DEFAULT, \'{\\"projectId\\":1,\\"originatorId\\":41,\\"typeId\\":1,\\"year\\":2025,\\"customTokens\\":{\\"TYPE_CODE\\":\\"RFA\\",\\"ORG_CODE\\":\\"ORG\\"}}\', DEFAULT) RETURNING `id`, `error_at`'
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'error_at' in 'RETURNING'",
sql: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, \'doc_num:1:1:0:2025\', \'DB_ERROR\', \'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\', \'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\\n at Query.onResult (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\\\src\\\\driver\\\\mysql\\\\MysqlQueryRunner.ts:248:33)\\n at Query.execute (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\commands\\\\command.js:36:14)\\n at PoolConnection.handlePacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:477:34)\\n at PacketParser.onPacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:93:12)\\n at PacketParser.executeStart (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\packet_parser.js:75:16)\\n at Socket.<anonymous> (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:100:25)\\n at Socket.emit (node:events:519:28)\\n at addChunk (node:internal/streams/readable:561:12)\\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\\n at Socket.Readable.push (node:internal/streams/readable:392:5)\\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)\', DEFAULT, DEFAULT, \'{\\"projectId\\":1,\\"originatorId\\":41,\\"typeId\\":1,\\"year\\":2025,\\"customTokens\\":{\\"TYPE_CODE\\":\\"RFA\\",\\"ORG_CODE\\":\\"ORG\\"}}\', DEFAULT) RETURNING `id`, `error_at`'
}
[Nest] 22264 - 12/09/2025, 10:27:45 AM ERROR [CorrespondenceService] Failed to create correspondence: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
[Nest] 22264 - 12/09/2025, 10:27:45 AM ERROR [ExceptionsHandler] QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`',
parameters: [
1,
41,
-1,
1,
0,
0,
0,
2025,
1
],
driverError: Error: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
},
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `document_number_counters_ibfk_3` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
}
FAIL test/phase3-workflow.e2e-spec.ts (7.412 s)
● Console

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

expected 201 "Created", got 500 "Internal Server Error"

77 | details: { question: 'Testing Unified Workflow' },
78 | })
> 79 | .expect(201);
| ^
80 |
81 | expect(response.body).toHaveProperty('id');
82 | expect(response.body).toHaveProperty('correspondenceNumber');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:79:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 400 "Bad Request"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.
Test Suites: 1 failed, 2 passed, 3 total
Tests: 2 failed, 3 passed, 5 total
Snapshots: 0 total
Time: 8.723 s
Ran all test suites.
ELIFECYCLE Command failed with exit code 1.
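This run pins the remaining FK failure down: the service now writes `-1` into `recipient_organization_id` as a "no recipient" sentinel, and MySQL checks the foreign key for every non-NULL value, so `-1` can never satisfy the constraint (renamed to `fk_recipient_when_not_all` in the next run, same failure). FK checks skip NULL, which is the conventional sentinel here. A hedged sketch of the counter insert with that change; the entity, repository, and field names are assumptions modeled on the `document_number_counters` columns in the log:

// Sketch only — replaces the -1 sentinel visible in the parameter lists above
// with NULL, which MySQL's foreign-key check skips.
import { Repository } from 'typeorm';
import { DocumentNumberCounter } from './document-number-counter.entity'; // assumed

async function createCounter(
  repo: Repository<DocumentNumberCounter>,
  recipientOrganizationId: number | null, // pass null, never -1, for "all recipients"
): Promise<DocumentNumberCounter> {
  const counter = repo.create({
    projectId: 1,
    originatorOrganizationId: 41,
    recipientOrganizationId: recipientOrganizationId ?? null,
    correspondenceTypeId: 1,
    currentYear: 2025,
    lastNumber: 1,
  });
  // No FK violation: a NULL recipient is not checked against organizations(id).
  return repo.save(counter);
}

The same reasoning applies to the `sub_type_id`, `rfa_type_id`, and `discipline_id` zeros in the parameter lists, if those columns carry foreign keys as well; the schema in specs/07-database is authoritative.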
220
backend/e2e-output7.txt
Normal file
@@ -0,0 +1,220 @@
|
||||
|
||||
> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
|
||||
> jest --config ./test/jest-e2e.json
|
||||
|
||||
PASS test/simple.e2e-spec.ts
|
||||
PASS test/app.e2e-spec.ts
|
||||
[Nest] 44520 - 12/09/2025, 11:16:08 AM ERROR [DocumentNumberingService] Error generating number for doc_num:1:1:0:2025
|
||||
[Nest] 44520 - 12/09/2025, 11:16:08 AM ERROR [DocumentNumberingService] QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
|
||||
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
|
||||
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
|
||||
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
|
||||
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
|
||||
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
|
||||
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
|
||||
at Socket.emit (node:events:519:28)
|
||||
at addChunk (node:internal/streams/readable:561:12)
|
||||
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
|
||||
at Socket.Readable.push (node:internal/streams/readable:392:5)
|
||||
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
|
||||
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
|
||||
query: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`',
|
||||
parameters: [
|
||||
1,
|
||||
41,
|
||||
-1,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
2025,
|
||||
1
|
||||
],
|
||||
driverError: Error: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
|
||||
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
|
||||
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
|
||||
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
|
||||
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
|
||||
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
|
||||
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
|
||||
at Socket.emit (node:events:519:28)
|
||||
at addChunk (node:internal/streams/readable:561:12)
|
||||
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
|
||||
at Socket.Readable.push (node:internal/streams/readable:392:5)
|
||||
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
|
||||
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
|
||||
code: 'ER_NO_REFERENCED_ROW_2',
|
||||
errno: 1452,
|
||||
sqlState: '23000',
|
||||
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
|
||||
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
|
||||
},
|
||||
code: 'ER_NO_REFERENCED_ROW_2',
|
||||
errno: 1452,
|
||||
sqlState: '23000',
|
||||
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
|
||||
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
|
||||
}
|
||||
[Nest] 44520 - 12/09/2025, 11:16:08 AM ERROR [DocumentNumberingService] Failed to log error
|
||||
[Nest] 44520 - 12/09/2025, 11:16:08 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'error_at' in 'RETURNING'
|
||||
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
|
||||
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
|
||||
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
|
||||
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
|
||||
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
|
||||
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
|
||||
at Socket.emit (node:events:519:28)
|
||||
at addChunk (node:internal/streams/readable:561:12)
|
||||
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
|
||||
at Socket.Readable.push (node:internal/streams/readable:392:5)
|
||||
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
|
||||
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
|
||||
query: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, DEFAULT) RETURNING `id`, `error_at`',
|
||||
parameters: [
|
||||
'doc_num:1:1:0:2025',
|
||||
'DB_ERROR',
|
||||
'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
|
||||
'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\n at Query.onResult (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\src\\driver\\mysql\\MysqlQueryRunner.ts:248:33)\n at Query.execute (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\commands\\command.js:36:14)\n at PoolConnection.handlePacket (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:477:34)\n at PacketParser.onPacket (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:93:12)\n at PacketParser.executeStart (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\packet_parser.js:75:16)\n at Socket.<anonymous> (D:\\nap-dms.lcbp3\\node_modules\\.pnpm\\mysql2@3.15.3\\node_modules\\mysql2\\lib\\base\\connection.js:100:25)\n at Socket.emit (node:events:519:28)\n at addChunk (node:internal/streams/readable:561:12)\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\n at Socket.Readable.push (node:internal/streams/readable:392:5)\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)',
'{"projectId":1,"originatorId":41,"typeId":1,"year":2025,"customTokens":{"TYPE_CODE":"RFA","ORG_CODE":"ORG"}}'
],
driverError: Error: Unknown column 'error_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'error_at' in 'RETURNING'",
sql: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, \'doc_num:1:1:0:2025\', \'DB_ERROR\', \'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\', \'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\\n at Query.onResult (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\\\src\\\\driver\\\\mysql\\\\MysqlQueryRunner.ts:248:33)\\n at Query.execute (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\commands\\\\command.js:36:14)\\n at PoolConnection.handlePacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:477:34)\\n at PacketParser.onPacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:93:12)\\n at PacketParser.executeStart (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\packet_parser.js:75:16)\\n at Socket.<anonymous> (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:100:25)\\n at Socket.emit (node:events:519:28)\\n at addChunk (node:internal/streams/readable:561:12)\\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\\n at Socket.Readable.push (node:internal/streams/readable:392:5)\\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)\', DEFAULT, DEFAULT, \'{\\"projectId\\":1,\\"originatorId\\":41,\\"typeId\\":1,\\"year\\":2025,\\"customTokens\\":{\\"TYPE_CODE\\":\\"RFA\\",\\"ORG_CODE\\":\\"ORG\\"}}\', DEFAULT) RETURNING `id`, `error_at`'
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'error_at' in 'RETURNING'",
sql: 'INSERT INTO `document_number_errors`(`id`, `counter_key`, `error_type`, `error_message`, `stack_trace`, `user_id`, `ip_address`, `context`, `error_at`) VALUES (DEFAULT, \'doc_num:1:1:0:2025\', \'DB_ERROR\', \'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\', \'QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)\\n at Query.onResult (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\\\\src\\\\driver\\\\mysql\\\\MysqlQueryRunner.ts:248:33)\\n at Query.execute (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\commands\\\\command.js:36:14)\\n at PoolConnection.handlePacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:477:34)\\n at PacketParser.onPacket (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:93:12)\\n at PacketParser.executeStart (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\packet_parser.js:75:16)\\n at Socket.<anonymous> (D:\\\\nap-dms.lcbp3\\\\node_modules\\\\.pnpm\\\\mysql2@3.15.3\\\\node_modules\\\\mysql2\\\\lib\\\\base\\\\connection.js:100:25)\\n at Socket.emit (node:events:519:28)\\n at addChunk (node:internal/streams/readable:561:12)\\n at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)\\n at Socket.Readable.push (node:internal/streams/readable:392:5)\\n at TCP.onStreamRead (node:internal/stream_base_commons:189:23)\\n at TCP.callbackTrampoline (node:internal/async_hooks:130:17)\', DEFAULT, DEFAULT, \'{\\"projectId\\":1,\\"originatorId\\":41,\\"typeId\\":1,\\"year\\":2025,\\"customTokens\\":{\\"TYPE_CODE\\":\\"RFA\\",\\"ORG_CODE\\":\\"ORG\\"}}\', DEFAULT) RETURNING `id`, `error_at`'
}
[Nest] 44520 - 12/09/2025, 11:16:08 AM ERROR [CorrespondenceService] Failed to create correspondence: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
[Nest] 44520 - 12/09/2025, 11:16:08 AM ERROR [ExceptionsHandler] QueryFailedError: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`',
parameters: [
1,
41,
-1,
1,
0,
0,
0,
2025,
1
],
driverError: Error: Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
},
code: 'ER_NO_REFERENCED_ROW_2',
errno: 1452,
sqlState: '23000',
sqlMessage: 'Cannot add or update a child row: a foreign key constraint fails (`lcbp3_dev`.`document_number_counters`, CONSTRAINT `fk_recipient_when_not_all` FOREIGN KEY (`recipient_organization_id`) REFERENCES `organizations` (`id`) ON DELETE CASCADE)',
sql: 'INSERT INTO `document_number_counters`(`project_id`, `originator_organization_id`, `recipient_organization_id`, `correspondence_type_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `current_year`, `last_number`, `version`) VALUES (1, 41, -1, 1, 0, 0, 0, 2025, 1, 1) RETURNING `recipient_organization_id`, `sub_type_id`, `rfa_type_id`, `discipline_id`, `last_number`, `version`'
}

FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences (POST) - Create Document

expected 201 "Created", got 500 "Internal Server Error"

77 | details: { question: 'Testing Unified Workflow' },
78 | })
> 79 | .expect(201);
| ^
80 |
81 | expect(response.body).toHaveProperty('id');
82 | expect(response.body).toHaveProperty('correspondenceNumber');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:79:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 400 "Bad Request"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.

Test Suites: 1 failed, 2 passed, 3 total
Tests: 2 failed, 3 passed, 5 total
Snapshots: 0 total
Time: 5.786 s, estimated 8 s
Ran all test suites.
ELIFECYCLE Command failed with exit code 1.
111 backend/e2e-output8.txt Normal file
@@ -0,0 +1,111 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 25968 - 12/09/2025, 11:19:28 AM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 25968 - 12/09/2025, 11:19:28 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0001-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
1,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0001-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 1, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0001-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 1, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
[Nest] 25968 - 12/09/2025, 11:19:28 AM ERROR [WorkflowEngineService] Transition Failed for 1215d0aa-453f-46dc-845d-0488a0213c4a: Cannot read properties of undefined (reading 'roles')
[Nest] 25968 - 12/09/2025, 11:19:28 AM ERROR [CorrespondenceWorkflowService] Failed to submit workflow: TypeError: Cannot read properties of undefined (reading 'roles')
[Nest] 25968 - 12/09/2025, 11:19:28 AM ERROR [ExceptionsHandler] TypeError: Cannot read properties of undefined (reading 'roles')
at WorkflowDslService.checkRequirements (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-dsl.service.ts:219:13)
at WorkflowDslService.evaluate (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-dsl.service.ts:178:10)
at WorkflowEngineService.processTransition (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-engine.service.ts:259:42)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at CorrespondenceWorkflowService.submitWorkflow (D:\nap-dms.lcbp3\backend\src\modules\correspondence\correspondence-workflow.service.ts:72:32)

FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 3

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 500 "Internal Server Error"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.

Test Suites: 1 failed, 2 passed, 3 total
Tests: 1 failed, 4 passed, 5 total
Snapshots: 0 total
Time: 5.439 s
Ran all test suites.
ELIFECYCLE Command failed with exit code 1.
111 backend/e2e-output9.txt Normal file
@@ -0,0 +1,111 @@

> backend@1.5.1 test:e2e D:\nap-dms.lcbp3\backend
> jest --config ./test/jest-e2e.json

PASS test/simple.e2e-spec.ts
PASS test/app.e2e-spec.ts
[Nest] 35280 - 12/09/2025, 11:24:24 AM ERROR [DocumentNumberingService] Failed to log audit
[Nest] 35280 - 12/09/2025, 11:24:24 AM ERROR [DocumentNumberingService] QueryFailedError: Unknown column 'generated_at' in 'RETURNING'
at Query.onResult (D:\nap-dms.lcbp3\node_modules\.pnpm\typeorm@0.3.27_ioredis@5.8._cb81dfd56f1203fe00eb0fec5dfcce08\src\driver\mysql\MysqlQueryRunner.ts:248:33)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:36:14)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
query: 'INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, ?, ?, ?, ?, DEFAULT, DEFAULT, ?, ?, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`',
parameters: [
'ผรม.1-ผรม.1-0002-2568',
'doc_num:1:1:0:2025',
'{ORG}-{ORG}-{SEQ:4}-{YEAR}',
2,
0,
0
],
driverError: Error: Unknown column 'generated_at' in 'RETURNING'
at Packet.asError (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packets\packet.js:740:17)
at Query.execute (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\commands\command.js:29:26)
at PoolConnection.handlePacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:477:34)
at PacketParser.onPacket (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:93:12)
at PacketParser.executeStart (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\packet_parser.js:75:16)
at Socket.<anonymous> (D:\nap-dms.lcbp3\node_modules\.pnpm\mysql2@3.15.3\node_modules\mysql2\lib\base\connection.js:100:25)
at Socket.emit (node:events:519:28)
at addChunk (node:internal/streams/readable:561:12)
at readableAddChunkPushByteMode (node:internal/streams/readable:512:3)
at Socket.Readable.push (node:internal/streams/readable:392:5)
at TCP.onStreamRead (node:internal/stream_base_commons:189:23)
at TCP.callbackTrampoline (node:internal/async_hooks:130:17) {
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0002-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 2, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
},
code: 'ER_BAD_FIELD_ERROR',
errno: 1054,
sqlState: '42S22',
sqlMessage: "Unknown column 'generated_at' in 'RETURNING'",
sql: "INSERT INTO `document_number_audit`(`id`, `generated_number`, `counter_key`, `template_used`, `sequence_number`, `user_id`, `ip_address`, `retry_count`, `lock_wait_ms`, `generated_at`) VALUES (DEFAULT, 'ผรม.1-ผรม.1-0002-2568', 'doc_num:1:1:0:2025', '{ORG}-{ORG}-{SEQ:4}-{YEAR}', 2, DEFAULT, DEFAULT, 0, 0, DEFAULT) RETURNING `id`, `retry_count`, `generated_at`"
}
[Nest] 35280 - 12/09/2025, 11:24:25 AM ERROR [WorkflowEngineService] Transition Failed for 3a51f630-c4fc-4fb4-8c2b-f1150195d8bd: Cannot read properties of undefined (reading 'roles')
[Nest] 35280 - 12/09/2025, 11:24:25 AM ERROR [CorrespondenceWorkflowService] Failed to submit workflow: TypeError: Cannot read properties of undefined (reading 'roles')
[Nest] 35280 - 12/09/2025, 11:24:25 AM ERROR [ExceptionsHandler] TypeError: Cannot read properties of undefined (reading 'roles')
at WorkflowDslService.checkRequirements (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-dsl.service.ts:219:13)
at WorkflowDslService.evaluate (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-dsl.service.ts:178:10)
at WorkflowEngineService.processTransition (D:\nap-dms.lcbp3\backend\src\modules\workflow-engine\workflow-engine.service.ts:259:42)
at processTicksAndRejections (node:internal/process/task_queues:105:5)
at CorrespondenceWorkflowService.submitWorkflow (D:\nap-dms.lcbp3\backend\src\modules\correspondence\correspondence-workflow.service.ts:73:32)

FAIL test/phase3-workflow.e2e-spec.ts
● Console

console.log
Created Correspondence ID: 4

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:84:13)

console.warn
Skipping action test - no instanceId from submit

104 | // Skip if submit failed to get instanceId
105 | if (!workflowInstanceId) {
> 106 | console.warn('Skipping action test - no instanceId from submit');
| ^
107 | return;
108 | }
109 |

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:106:15)

● Phase 3 Workflow (E2E) › /correspondences/:id/submit (POST) - Submit to Workflow

expected 201 "Created", got 500 "Internal Server Error"

92 | note: 'Submitting for E2E test',
93 | })
> 94 | .expect(201);
| ^
95 |
96 | expect(response.body).toHaveProperty('instanceId');
97 | expect(response.body).toHaveProperty('currentState');

at Object.<anonymous> (phase3-workflow.e2e-spec.ts:94:8)
----
at Test._assertStatus (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:309:14)
at ../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:365:13
at Test._assertFunction (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:342:13)
at Test.assert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:195:23)
at localAssert (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:138:14)
at Server.<anonymous> (../../node_modules/.pnpm/supertest@7.1.4/node_modules/supertest/lib/test.js:152:11)

A worker process has failed to exit gracefully and has been force exited. This is likely caused by tests leaking due to improper teardown. Try running with --detectOpenHandles to find leaks. Active timers can also cause this, ensure that .unref() was called on them.

Test Suites: 1 failed, 2 passed, 3 total
Tests: 1 failed, 4 passed, 5 total
Snapshots: 0 total
Time: 5.652 s
Ran all test suites.
ELIFECYCLE Command failed with exit code 1.
@@ -0,0 +1,105 @@
-- Migration: Align Schema with Documentation
-- Version: 1733800000000
-- Date: 2025-12-10
-- Description: Add missing fields and fix column lengths to match schema v1.5.1

-- ==========================================================
-- Phase 1: Organizations Table Updates
-- ==========================================================
-- Add role_id column to organizations
ALTER TABLE organizations
ADD COLUMN role_id INT NULL COMMENT 'Reference to organization_roles table';

-- Add foreign key constraint
ALTER TABLE organizations
ADD CONSTRAINT fk_organizations_role FOREIGN KEY (role_id) REFERENCES organization_roles(id) ON DELETE SET NULL;

-- Modify organization_name length from 200 to 255
ALTER TABLE organizations
MODIFY COLUMN organization_name VARCHAR(255) NOT NULL COMMENT 'Organization name';

-- ==========================================================
-- Phase 2: Users Table Updates (Security Fields)
-- ==========================================================
-- Add failed_attempts for login tracking
ALTER TABLE users
ADD COLUMN failed_attempts INT DEFAULT 0 COMMENT 'Number of failed login attempts';

-- Add locked_until for account lockout mechanism
ALTER TABLE users
ADD COLUMN locked_until DATETIME NULL COMMENT 'Account locked until this timestamp';

-- Add last_login_at for audit trail
ALTER TABLE users
ADD COLUMN last_login_at TIMESTAMP NULL COMMENT 'Last successful login timestamp';

-- ==========================================================
-- Phase 3: Roles Table Updates
-- ==========================================================
-- Modify role_name length from 50 to 100
ALTER TABLE roles
MODIFY COLUMN role_name VARCHAR(100) NOT NULL COMMENT 'Role name';

-- ==========================================================
-- Verification Queries
-- ==========================================================
-- Verify organizations table structure
SELECT COLUMN_NAME,
       DATA_TYPE,
       CHARACTER_MAXIMUM_LENGTH,
       IS_NULLABLE,
       COLUMN_COMMENT
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA = DATABASE()
  AND TABLE_NAME = 'organizations'
ORDER BY ORDINAL_POSITION;

-- Verify users table has new security fields
SELECT COLUMN_NAME,
       DATA_TYPE,
       COLUMN_DEFAULT,
       IS_NULLABLE,
       COLUMN_COMMENT
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA = DATABASE()
  AND TABLE_NAME = 'users'
  AND COLUMN_NAME IN (
    'failed_attempts',
    'locked_until',
    'last_login_at'
  )
ORDER BY ORDINAL_POSITION;

-- Verify roles table role_name length
SELECT COLUMN_NAME,
       DATA_TYPE,
       CHARACTER_MAXIMUM_LENGTH
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA = DATABASE()
  AND TABLE_NAME = 'roles'
  AND COLUMN_NAME = 'role_name';

-- ==========================================================
-- Rollback Script (Use if needed)
-- ==========================================================
/*
-- Rollback Phase 3: Roles
ALTER TABLE roles
MODIFY COLUMN role_name VARCHAR(50) NOT NULL;

-- Rollback Phase 2: Users
ALTER TABLE users
DROP COLUMN last_login_at,
DROP COLUMN locked_until,
DROP COLUMN failed_attempts;

-- Rollback Phase 1: Organizations
ALTER TABLE organizations
MODIFY COLUMN organization_name VARCHAR(200) NOT NULL;

ALTER TABLE organizations
DROP FOREIGN KEY fk_organizations_role;

ALTER TABLE organizations
DROP COLUMN role_id;
*/
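For context on Phase 2, the three new user columns are meant to drive a standard account-lockout flow. The sketch below is a hedged illustration only — the thresholds (`MAX_FAILED_ATTEMPTS`, `LOCK_MINUTES`) and function names are assumptions, not part of this migration or of the project's actual auth service:

```ts
// Hypothetical sketch: how failed_attempts / locked_until / last_login_at
// could be used. Threshold values are assumptions, not schema facts.
const MAX_FAILED_ATTEMPTS = 5;
const LOCK_MINUTES = 15;

interface UserSecurityRow {
  failed_attempts: number;
  locked_until: Date | null;
  last_login_at: Date | null;
}

// Reject the login attempt outright while the account is locked.
function assertNotLocked(user: UserSecurityRow, now = new Date()): void {
  if (user.locked_until && user.locked_until > now) {
    throw new Error(`Account locked until ${user.locked_until.toISOString()}`);
  }
}

// On a failed password check: bump the counter and lock once the cap is hit.
function onFailedLogin(user: UserSecurityRow, now = new Date()): UserSecurityRow {
  const attempts = user.failed_attempts + 1;
  return {
    ...user,
    failed_attempts: attempts,
    locked_until:
      attempts >= MAX_FAILED_ATTEMPTS
        ? new Date(now.getTime() + LOCK_MINUTES * 60_000)
        : user.locked_until,
  };
}

// On success: reset the counter, clear the lock, record the audit timestamp.
function onSuccessfulLogin(user: UserSecurityRow, now = new Date()): UserSecurityRow {
  return { ...user, failed_attempts: 0, locked_until: null, last_login_at: now };
}
```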
@@ -1,40 +1,59 @@
{
  "name": "backend",
  "version": "0.0.1",
  "version": "1.5.1",
  "description": "",
  "author": "",
  "private": true,
  "license": "UNLICENSED",
  "scripts": {
    "build": "nest build",
    "doc": "npx @compodoc/compodoc -p tsconfig.doc.json -s",
    "format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
    "start": "nest start",
    "start:dev": "nest start --watch",
    "start:debug": "nest start --debug --watch",
    "start:prod": "node dist/main",
    "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
    "test": "jest",
    "test": "jest --forceExit",
    "test:debug-handles": "jest --detectOpenHandles",
    "test:watch": "jest --watch",
    "test:cov": "jest --coverage",
    "test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
    "test:e2e": "jest --config ./test/jest-e2e.json"
    "test:e2e": "jest --config ./test/jest-e2e.json",
    "seed": "ts-node -r tsconfig-paths/register src/database/seeds/run-seed.ts"
  },
  "dependencies": {
    "@casl/ability": "^6.7.3",
    "@elastic/elasticsearch": "^8.11.1",
    "@nestjs-modules/ioredis": "^2.0.2",
    "@nestjs/axios": "^4.0.1",
    "@nestjs/bullmq": "^11.0.4",
    "@nestjs/cache-manager": "^3.0.1",
    "@nestjs/common": "^11.0.1",
    "@nestjs/config": "^4.0.2",
    "@nestjs/core": "^11.0.1",
    "@nestjs/elasticsearch": "^11.1.0",
    "@nestjs/jwt": "^11.0.1",
    "@nestjs/mapped-types": "^2.1.0",
    "@nestjs/passport": "^11.0.5",
    "@nestjs/platform-express": "^11.0.1",
    "@nestjs/platform-socket.io": "^11.1.9",
    "@nestjs/schedule": "^6.0.1",
    "@nestjs/swagger": "^11.2.3",
    "@nestjs/terminus": "^11.0.0",
    "@nestjs/throttler": "^6.4.0",
    "@nestjs/typeorm": "^11.0.0",
    "@nestjs/websockets": "^11.1.9",
    "@types/nodemailer": "^7.0.4",
    "@willsoto/nestjs-prometheus": "^6.0.2",
    "ajv": "^8.17.1",
    "ajv-formats": "^3.0.1",
    "async-retry": "^1.3.3",
    "axios": "^1.13.2",
    "bcrypt": "^6.0.0",
    "bullmq": "^5.63.2",
    "cache-manager": "^7.2.5",
    "cache-manager-redis-yet": "^5.1.5",
    "class-transformer": "^0.5.1",
    "class-validator": "^0.14.2",
    "fs-extra": "^11.3.2",
@@ -43,30 +62,42 @@
    "joi": "^18.0.1",
    "multer": "^2.0.2",
    "mysql2": "^3.15.3",
    "nest-winston": "^1.10.2",
    "nodemailer": "^7.0.10",
    "opossum": "^9.0.0",
    "passport": "^0.7.0",
    "passport-jwt": "^4.0.1",
    "prom-client": "^15.1.3",
    "redlock": "5.0.0-beta.2",
    "reflect-metadata": "^0.2.2",
    "rxjs": "^7.8.1",
    "socket.io": "^4.8.1",
    "swagger-ui-express": "^5.0.1",
    "typeorm": "^0.3.27",
    "uuid": "^13.0.0"
    "uuid": "^9.0.1",
    "winston": "^3.18.3",
    "zod": "^4.1.13"
  },
  "devDependencies": {
    "@compodoc/compodoc": "^1.1.32",
    "@eslint/eslintrc": "^3.2.0",
    "@eslint/js": "^9.18.0",
    "@nestjs/cli": "^11.0.0",
    "@nestjs/schematics": "^11.0.0",
    "@nestjs/testing": "^11.0.1",
    "@types/async-retry": "^1.4.9",
    "@types/bcrypt": "^6.0.0",
    "@types/cache-manager": "^5.0.0",
    "@types/express": "^5.0.0",
    "@types/fs-extra": "^11.0.4",
    "@types/ioredis": "^5.0.0",
    "@types/jest": "^30.0.0",
    "@types/multer": "^2.0.0",
    "@types/node": "^22.10.7",
    "@types/opossum": "^8.1.9",
    "@types/passport-jwt": "^4.0.1",
    "@types/supertest": "^6.0.2",
    "@types/uuid": "^11.0.0",
    "@types/uuid": "^9.0.8",
    "eslint": "^9.18.0",
    "eslint-config-prettier": "^10.0.1",
    "eslint-plugin-prettier": "^5.2.2",
7437 backend/pnpm-lock.yaml generated
File diff suppressed because it is too large
31 backend/scripts/check-connection.ts Normal file
@@ -0,0 +1,31 @@
import { DataSource } from 'typeorm';
import { databaseConfig } from '../src/config/database.config';
import * as dotenv from 'dotenv';
import { MysqlConnectionOptions } from 'typeorm/driver/mysql/MysqlConnectionOptions';

dotenv.config();

async function checkConnection() {
  console.log('Checking database connection...');
  console.log(`Host: ${process.env.DB_HOST}`);
  console.log(`Port: ${process.env.DB_PORT}`);
  console.log(`User: ${process.env.DB_USERNAME}`);
  console.log(`Database: ${process.env.DB_DATABASE}`);

  const dataSource = new DataSource(databaseConfig as MysqlConnectionOptions);

  try {
    await dataSource.initialize();
    console.log('✅ Connection initialized successfully!');

    const result = await dataSource.query('SHOW COLUMNS FROM rfa_types');
    console.log('rfa_types columns:', result);

    await dataSource.destroy();
  } catch (error) {
    console.error('❌ Connection failed:', error);
    process.exit(1);
  }
}

checkConnection();
52 backend/scripts/debug-db.ts Normal file
@@ -0,0 +1,52 @@
import { DataSource } from 'typeorm';
import * as fs from 'fs';

// Read .env to get DB config
const envFile = fs.readFileSync('.env', 'utf8');
const getEnv = (key: string) => {
  const line = envFile.split('\n').find(l => l.startsWith(key + '='));
  return line ? line.split('=')[1].trim() : '';
};

const dataSource = new DataSource({
  type: 'mariadb',
  host: getEnv('DB_HOST') || 'localhost',
  port: parseInt(getEnv('DB_PORT') || '3306'),
  username: getEnv('DB_USERNAME') || 'admin',
  password: getEnv('DB_PASSWORD') || 'Center2025',
  database: getEnv('DB_DATABASE') || 'lcbp3_dev',
  entities: [],
  synchronize: false,
});

async function main() {
  await dataSource.initialize();
  console.log('Connected to DB');

  try {
    const assignments = await dataSource.query('SELECT * FROM user_assignments');
    console.log('All Assignments:', assignments);

    // Check if User 3 has any assignment
    const user3Assign = assignments.find((a: any) => a.user_id === 3);
    if (!user3Assign) {
      console.log('User 3 has NO assignments.');
      // Try to insert assignment for User 3 (Editor)
      console.log('Inserting assignment for User 3 (Role 4, Org 41)...');
      await dataSource.query(`
        INSERT INTO user_assignments (user_id, role_id, organization_id, assigned_by_user_id)
        VALUES (3, 4, 41, 1)
      `);
      console.log('Inserted assignment for User 3.');
    } else {
      console.log('User 3 Assignment:', user3Assign);
    }
  } catch (err) {
    console.error(err);
  } finally {
    await dataSource.destroy();
  }
}

main();
126 backend/scripts/verify-workflow.ts Normal file
@@ -0,0 +1,126 @@
import * as crypto from 'crypto';

// Configuration
const JWT_SECRET =
  'eebc122aa65adde8c76c6a0847d9649b2b67a06db1504693e6c912e51499b76e';
const API_URL = 'http://localhost:3000/api';

// Helper to sign JWT
function signJwt(payload: any) {
  const header = { alg: 'HS256', typ: 'JWT' };
  const encodedHeader = Buffer.from(JSON.stringify(header)).toString(
    'base64url',
  );
  const encodedPayload = Buffer.from(JSON.stringify(payload)).toString(
    'base64url',
  );

  const signature = crypto
    .createHmac('sha256', JWT_SECRET)
    .update(encodedHeader + '.' + encodedPayload)
    .digest('base64url');

  return `${encodedHeader}.${encodedPayload}.${signature}`;
}

async function main() {
  // 1. Generate Token for Editor01 (ID 3)
  const token = signJwt({ username: 'editor01', sub: 3 });
  console.log('Generated Token:', token);

  const headers = {
    'Content-Type': 'application/json',
    Authorization: `Bearer ${token}`,
  };

  try {
    // 1.5 Check Permissions
    console.log('\nChecking Permissions...');
    const permRes = await fetch(`${API_URL}/users/me/permissions`, { headers });
    if (permRes.ok) {
      const perms = await permRes.json();
      console.log('My Permissions:', perms);
    } else {
      console.error(
        'Failed to get permissions:',
        permRes.status,
        await permRes.text(),
      );
    }

    // 2. Create Correspondence
    console.log('\nCreating Correspondence...');
    const createRes = await fetch(`${API_URL}/correspondences`, {
      method: 'POST',
      headers,
      body: JSON.stringify({
        projectId: 1,
        typeId: 1, // Assuming ID 1 exists (e.g., RFA or Memo)
        // originatorId: 1, // Removed for Admin user
        title: 'Manual Verification Doc',
        details: { note: 'Created via script' },
      }),
    });

    if (!createRes.ok) {
      throw new Error(
        `Create failed: ${createRes.status} ${await createRes.text()}`,
      );
    }

    const doc: any = await createRes.json();
    console.log('Created Document:', doc.id, doc.correspondenceNumber);

    // 3. Submit Workflow
    console.log('\nSubmitting Workflow...');
    const submitRes = await fetch(
      `${API_URL}/correspondences/${doc.id}/submit`,
      {
        method: 'POST',
        headers,
        body: JSON.stringify({
          templateId: 1, // Assuming Template ID 1 exists
        }),
      },
    );

    if (!submitRes.ok) {
      const text = await submitRes.text();
      console.error(`Submit failed: ${submitRes.status} ${text}`);
      if (text.includes('template')) {
        console.warn(
          '⚠️ Template ID 1 not found. Please ensure a Routing Template exists.',
        );
      }
      return;
    }

    console.log('Workflow Submitted Successfully');

    // 4. Approve Workflow (as same user for simplicity, assuming logic allows or user has permission)
    console.log('\nApproving Workflow...');
    const approveRes = await fetch(
      `${API_URL}/correspondences/${doc.id}/workflow/action`,
      {
        method: 'POST',
        headers,
        body: JSON.stringify({
          action: 'APPROVE',
          comment: 'Approved via script',
        }),
      },
    );

    if (!approveRes.ok) {
      throw new Error(
        `Approve failed: ${approveRes.status} ${await approveRes.text()}`,
      );
    }

    console.log('Workflow Approved Successfully');
  } catch (error: any) {
    console.error('Error:', error.message);
  }
}

main().catch((err) => console.error(err));
125 backend/src/Workflow DSL Specification.md Normal file
@@ -0,0 +1,125 @@
# **Workflow DSL Specification v1.0**

This document specifies the Domain-Specific Language (DSL) used to define the business logic of document routing in the LCBP3-DMS system.

## **1. Root Structure**

A definition file must be in YAML or JSON format, with the following structure:

```yaml
workflow: "RFA_FLOW" # workflow code (unique)
version: 1 # version of the logic
description: "RFA Approval Process" # description

# List of all possible states
states:
  - name: "DRAFT" # state name (case-sensitive)
    initial: true # the initial state (exactly one is required)
    on: # actions available from this state
      SUBMIT: # action name (the button the user presses)
        to: "IN_REVIEW" # target state
        require: # (optional) permission requirements
          role: "EDITOR"
        events: # (optional) events fired when the state changes
          - type: "notify"
            target: "reviewer"

  - name: "IN_REVIEW"
    on:
      APPROVE:
        to: "APPROVED"
        condition: "context.amount < 1000000" # (optional) JS expression
      REJECT:
        to: "DRAFT"
        events:
          - type: "notify"
            target: "creator"

  - name: "APPROVED"
    terminal: true # terminal state (no further transitions)
```

## **2. Field Definitions**

### **2.1 State Object**

| Field    | Type    | Required | Description                                        |
| :------- | :------ | :------- | :------------------------------------------------- |
| name     | string  | Yes      | State name (unique key)                             |
| initial  | boolean | No       | Marks the starting point (exactly one per workflow) |
| terminal | boolean | No       | Marks an end point                                  |
| on       | object  | No       | Map of action -> transition rule                    |

### **2.2 Transition Rule Object**

| Field     | Type   | Required | Description                             |
| :-------- | :----- | :------- | :-------------------------------------- |
| to        | string | Yes      | Target state name                       |
| require   | object | No       | Role/user requirements                  |
| condition | string | No       | JavaScript expression (returns boolean) |
| events    | array  | No       | Side effects run after the transition   |

### **2.3 Requirements Object**

| Field | Type   | Description                                          |
| :---- | :----- | :--------------------------------------------------- |
| role  | string | The user must hold this role (e.g. PROJECT_MANAGER)  |
| user  | string | The user must have this ID (hard-coded)              |

### **2.4 Event Object**

| Field    | Type   | Description                                   |
| :------- | :----- | :-------------------------------------------- |
| type     | string | notify, webhook, update_status                |
| target   | string | Recipient (e.g. creator, assignee, or a role) |
| template | string | Message template code                         |
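To make the evaluation order concrete, here is a minimal TypeScript sketch of how a rule of this shape could be checked. It is illustrative only — the project's actual evaluator lives in `WorkflowDslService` (`checkRequirements` / `evaluate`), and the `Actor` shape and the use of `new Function` for conditions here are assumptions, not its real API:

```ts
// Minimal sketch of evaluating a transition rule of the shape defined above.
interface TransitionRule {
  to: string;
  require?: { role?: string; user?: string };
  condition?: string; // JavaScript expression returning a boolean
}

interface Actor {
  id: string;
  roles: string[];
}

function canTransition(
  rule: TransitionRule,
  actor: Actor,
  context: Record<string, unknown>,
): boolean {
  // 1. Requirements: role and/or user must match when present.
  if (rule.require?.role && !actor.roles.includes(rule.require.role)) {
    return false;
  }
  if (rule.require?.user && actor.id !== rule.require.user) {
    return false;
  }
  // 2. Condition: evaluate the JS expression against the context.
  if (rule.condition) {
    const fn = new Function('context', `return (${rule.condition});`);
    return Boolean(fn(context));
  }
  return true;
}

// Example: with the rule { to: "APPROVED", condition: "context.amount < 1000000" },
// an actor with roles ['EDITOR'] and context { amount: 500000 } may transition.
```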
## **3. Real-world Examples**

### **Example: RFA Approval Flow**

```json
{
  "workflow": "RFA_STD",
  "version": 1,
  "states": [
    {
      "name": "DRAFT",
      "initial": true,
      "on": {
        "SUBMIT": {
          "to": "CONSULTANT_REVIEW",
          "require": { "role": "CONTRACTOR" }
        }
      }
    },
    {
      "name": "CONSULTANT_REVIEW",
      "on": {
        "APPROVE_1": {
          "to": "OWNER_REVIEW",
          "condition": "context.priority === 'HIGH'"
        },
        "APPROVE_2": {
          "to": "APPROVED",
          "condition": "context.priority === 'NORMAL'"
        },
        "REJECT": {
          "to": "DRAFT"
        }
      }
    },
    {
      "name": "OWNER_REVIEW",
      "on": {
        "APPROVE": { "to": "APPROVED" },
        "REJECT": { "to": "CONSULTANT_REVIEW" }
      }
    },
    {
      "name": "APPROVED",
      "terminal": true
    }
  ]
}
```
@@ -1,47 +1,98 @@
|
||||
// File: src/app.module.ts
|
||||
// บันทึกการแก้ไข: เพิ่ม CacheModule (Redis), Config สำหรับ Idempotency และ Maintenance Mode (T1.1)
|
||||
// บันทึกการแก้ไข: เพิ่ม MonitoringModule และ WinstonModule (T6.3)
|
||||
// เพิ่ม MasterModule
|
||||
|
||||
import { Module } from '@nestjs/common';
|
||||
import { APP_GUARD } from '@nestjs/core'; // <--- เพิ่ม Import นี้ T2.4
|
||||
import { APP_GUARD, APP_INTERCEPTOR } from '@nestjs/core';
|
||||
import { ConfigModule, ConfigService } from '@nestjs/config';
|
||||
import { TypeOrmModule } from '@nestjs/typeorm';
|
||||
import { BullModule } from '@nestjs/bullmq'; // Import BullModule
|
||||
import { ThrottlerModule, ThrottlerGuard } from '@nestjs/throttler'; // <--- เพิ่ม Import นี้ T2.4
|
||||
import { BullModule } from '@nestjs/bullmq';
|
||||
import { ThrottlerModule, ThrottlerGuard } from '@nestjs/throttler';
|
||||
import { CacheModule } from '@nestjs/cache-manager';
|
||||
import { WinstonModule } from 'nest-winston';
|
||||
import { redisStore } from 'cache-manager-redis-yet';
|
||||
import { RedisModule } from '@nestjs-modules/ioredis';
|
||||
|
||||
import { AppController } from './app.controller';
|
||||
import { AppService } from './app.service';
|
||||
import { envValidationSchema } from './common/config/env.validation.js'; // สังเกต .js สำหรับ ESM
|
||||
// import { CommonModule } from './common/common.module';
|
||||
import { envValidationSchema } from './common/config/env.validation.js';
|
||||
import redisConfig from './common/config/redis.config';
|
||||
import { winstonConfig } from './modules/monitoring/logger/winston.config';
|
||||
|
||||
// Entities & Interceptors
|
||||
import { AuditLog } from './common/entities/audit-log.entity';
|
||||
import { AuditLogInterceptor } from './common/interceptors/audit-log.interceptor';
|
||||
import { MaintenanceModeGuard } from './common/guards/maintenance-mode.guard';
|
||||
|
||||
// Modules
|
||||
import { AuthModule } from './common/auth/auth.module.js';
|
||||
import { UserModule } from './modules/user/user.module';
|
||||
import { ProjectModule } from './modules/project/project.module';
|
||||
import { FileStorageModule } from './modules/file-storage/file-storage.module';
|
||||
import { OrganizationModule } from './modules/organization/organization.module';
|
||||
import { ContractModule } from './modules/contract/contract.module';
|
||||
import { MasterModule } from './modules/master/master.module'; // [NEW] ✅ เพิ่ม MasterModule
|
||||
import { FileStorageModule } from './common/file-storage/file-storage.module.js';
|
||||
import { DocumentNumberingModule } from './modules/document-numbering/document-numbering.module';
|
||||
import { AuthModule } from './common/auth/auth.module.js'; // <--- เพิ่ม Import นี้ T2.4
|
||||
import { JsonSchemaModule } from './modules/json-schema/json-schema.module.js';
|
||||
import { WorkflowEngineModule } from './modules/workflow-engine/workflow-engine.module';
|
||||
import { CorrespondenceModule } from './modules/correspondence/correspondence.module';
|
||||
import { RfaModule } from './modules/rfa/rfa.module';
|
||||
import { DrawingModule } from './modules/drawing/drawing.module';
|
||||
import { TransmittalModule } from './modules/transmittal/transmittal.module';
|
||||
import { CirculationModule } from './modules/circulation/circulation.module';
|
||||
import { NotificationModule } from './modules/notification/notification.module';
|
||||
import { DashboardModule } from './modules/dashboard/dashboard.module';
|
||||
import { MonitoringModule } from './modules/monitoring/monitoring.module';
|
||||
import { ResilienceModule } from './common/resilience/resilience.module';
|
||||
import { SearchModule } from './modules/search/search.module';
|
||||
import { AuditLogModule } from './modules/audit-log/audit-log.module';
|
||||
|
||||
@Module({
|
||||
imports: [
|
||||
// 1. Setup Config Module พร้อม Validation
|
||||
ConfigModule.forRoot({
|
||||
isGlobal: true, // เรียกใช้ได้ทั่วทั้ง App ไม่ต้อง import ซ้ำ
|
||||
envFilePath: '.env', // อ่านไฟล์ .env (สำหรับ Dev)
|
||||
validationSchema: envValidationSchema, // ใช้ Schema ที่เราสร้างเพื่อตรวจสอบ
|
||||
isGlobal: true,
|
||||
envFilePath: '.env',
|
||||
load: [redisConfig],
|
||||
validationSchema: envValidationSchema,
|
||||
validationOptions: {
|
||||
// ถ้ามีค่าไหนไม่ผ่าน Validation ให้ Error และหยุดทำงานทันที
|
||||
abortEarly: true,
|
||||
},
|
||||
}),
|
||||
// 🛡️ T2.4 1. Setup Throttler Module (Rate Limiting)
|
||||
|
||||
// 🛡️ Setup Throttler Module (Rate Limiting)
|
||||
ThrottlerModule.forRoot([
|
||||
{
|
||||
ttl: 60000, // 60 วินาที (Time to Live)
|
||||
limit: 100, // ยิงได้สูงสุด 100 ครั้ง (Global Default)
|
||||
ttl: 60000, // 60 วินาที
|
||||
limit: 100, // ยิงได้สูงสุด 100 ครั้ง
|
||||
},
|
||||
]),
|
||||
|
||||
// 💾 Setup Cache Module (Redis)
|
||||
CacheModule.registerAsync({
|
||||
isGlobal: true,
|
||||
imports: [ConfigModule],
|
||||
useFactory: async (configService: ConfigService) => ({
|
||||
store: await redisStore({
|
||||
socket: {
|
||||
host: configService.get<string>('redis.host'),
|
||||
port: configService.get<number>('redis.port'),
|
||||
},
|
||||
ttl: configService.get<number>('redis.ttl'),
|
||||
}),
|
||||
}),
|
||||
inject: [ConfigService],
|
||||
}),
|
||||
|
||||
// 📝 Setup Winston Logger
|
||||
WinstonModule.forRoot(winstonConfig),
|
||||
|
||||
// 2. Setup TypeORM (MariaDB)
|
||||
TypeOrmModule.forRootAsync({
|
||||
imports: [ConfigModule],
|
||||
inject: [ConfigService],
|
||||
useFactory: async (configService: ConfigService) => ({
|
||||
useFactory: (configService: ConfigService) => ({
|
||||
type: 'mariadb',
|
||||
host: configService.get<string>('DB_HOST'),
|
||||
port: configService.get<number>('DB_PORT'),
|
||||
@@ -49,19 +100,18 @@ import { CorrespondenceModule } from './modules/correspondence/correspondence.mo
|
||||
password: configService.get<string>('DB_PASSWORD'),
|
||||
database: configService.get<string>('DB_DATABASE'),
|
||||
autoLoadEntities: true,
|
||||
// synchronize: true เฉพาะตอน Dev เท่านั้น ห้ามใช้บน Prod
|
||||
// synchronize: configService.get<string>('NODE_ENV') === 'development',
|
||||
// แก้บรรทัดนี้เป็น false ครับ
|
||||
// เพราะเราใช้ SQL Script สร้าง DB แล้ว ไม่ต้องการให้ TypeORM มาแก้ Structure อัตโนมัติ
|
||||
synchronize: false, // เราใช้ false ตามที่ตกลงกัน
|
||||
synchronize: false, // Production Ready: false
|
||||
}),
|
||||
}),
|
||||
|
||||
    // Register AuditLog entity (global scope)
    TypeOrmModule.forFeature([AuditLog]),

    // 3. BullMQ (Redis)
    BullModule.forRootAsync({
      imports: [ConfigModule],
      inject: [ConfigService],
      useFactory: (configService: ConfigService) => ({
        connection: {
          host: configService.get<string>('REDIS_HOST'),
          port: configService.get<number>('REDIS_PORT'),
@@ -69,24 +119,64 @@ import { CorrespondenceModule } from './modules/correspondence/correspondence.mo
        },
      }),
    }),

    // Redis Module (for InjectRedis)
    RedisModule.forRootAsync({
      imports: [ConfigModule],
      useFactory: (configService: ConfigService) => ({
        type: 'single',
        url: `redis://${configService.get('REDIS_HOST')}:${configService.get('REDIS_PORT')}`,
        options: {
          password: configService.get('REDIS_PASSWORD'),
        },
      }),
      inject: [ConfigService],
    }),

    // 📊 Monitoring & Resilience
    MonitoringModule,
    ResilienceModule,

    // 📦 Feature Modules
    AuthModule,
    // CommonModule,
    UserModule,
    ProjectModule,
    OrganizationModule,
    ContractModule,
    MasterModule, // ✅ [NEW] Register MasterModule here
    FileStorageModule,
    DocumentNumberingModule,
    JsonSchemaModule,
    WorkflowEngineModule,
    CorrespondenceModule,
    RfaModule,
    DrawingModule,
    TransmittalModule,
    CirculationModule,
    SearchModule,
    NotificationModule,
    DashboardModule,
    AuditLogModule,
  ],
  controllers: [AppController],
  providers: [
    AppService,
    // 🛡️ 1. Global guard (rate limiting)
    {
      provide: APP_GUARD,
      useClass: ThrottlerGuard,
    },
    // 🚧 2. Maintenance mode guard
    {
      provide: APP_GUARD,
      useClass: MaintenanceModeGuard,
    },
    // 📝 3. Global interceptor (audit log)
    {
      provide: APP_INTERCEPTOR,
      useClass: AuditLogInterceptor,
    },
  ],
})
export class AppModule {}

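The `envValidationSchema` referenced above is not shown in this diff. A minimal sketch of what it might look like, assuming the common NestJS pattern of a Joi object schema (every key here is illustrative, not the project's actual schema):

```typescript
import * as Joi from 'joi';

// Hypothetical schema; the real one lives elsewhere in the repo.
export const envValidationSchema = Joi.object({
  NODE_ENV: Joi.string()
    .valid('development', 'production', 'test')
    .default('development'),
  DB_HOST: Joi.string().required(),
  DB_PORT: Joi.number().default(3306),
  DB_PASSWORD: Joi.string().required(),
  DB_DATABASE: Joi.string().required(),
  REDIS_HOST: Joi.string().required(),
  REDIS_PORT: Joi.number().default(6379),
  JWT_SECRET: Joi.string().required(),
  JWT_REFRESH_SECRET: Joi.string().required(),
});
```

With `abortEarly: true` above, the app refuses to boot on the first missing or invalid variable instead of failing at runtime.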
@@ -1,18 +0,0 @@
import { Test, TestingModule } from '@nestjs/testing';
import { AutController } from './aut.controller';

describe('AutController', () => {
  let controller: AutController;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      controllers: [AutController],
    }).compile();

    controller = module.get<AutController>(AutController);
  });

  it('should be defined', () => {
    expect(controller).toBeDefined();
  });
});
@@ -1,4 +0,0 @@
import { Controller } from '@nestjs/common';

@Controller('aut')
export class AutController {}
@@ -1,30 +1,86 @@
import { Test, TestingModule } from '@nestjs/testing';
import { UnauthorizedException } from '@nestjs/common';
import { AuthController } from './auth.controller';
import { AuthService } from './auth.service';

describe('AuthController', () => {
  let controller: AuthController;
  let mockAuthService: Partial<AuthService>;

  beforeEach(async () => {
    mockAuthService = {
      validateUser: jest.fn(),
      login: jest.fn(),
      register: jest.fn(),
      refreshToken: jest.fn(),
      logout: jest.fn(),
    };

    const module: TestingModule = await Test.createTestingModule({
      controllers: [AuthController],
      providers: [
        {
          provide: AuthService,
          useValue: mockAuthService,
        },
      ],
    }).compile();

    controller = module.get<AuthController>(AuthController);
  });

  it('should be defined', () => {
    expect(controller).toBeDefined();
  });

  describe('login', () => {
    it('should return tokens when credentials are valid', async () => {
      const loginDto = { username: 'test', password: 'password' };
      const mockUser = { user_id: 1, username: 'test' };
      const mockTokens = {
        access_token: 'access_token',
        refresh_token: 'refresh_token',
        user: mockUser,
      };

      (mockAuthService.validateUser as jest.Mock).mockResolvedValue(mockUser);
      (mockAuthService.login as jest.Mock).mockResolvedValue(mockTokens);

      const result = await controller.login(loginDto);

      expect(mockAuthService.validateUser).toHaveBeenCalledWith(
        'test',
        'password'
      );
      expect(mockAuthService.login).toHaveBeenCalledWith(mockUser);
      expect(result).toEqual(mockTokens);
    });

    it('should throw UnauthorizedException when credentials are invalid', async () => {
      const loginDto = { username: 'test', password: 'wrong' };
      (mockAuthService.validateUser as jest.Mock).mockResolvedValue(null);

      await expect(controller.login(loginDto)).rejects.toThrow(
        UnauthorizedException
      );
    });
  });

  describe('register', () => {
    it('should register a new user', async () => {
      const registerDto = {
        username: 'newuser',
        password: 'password',
        email: 'test@test.com',
        display_name: 'Test User',
      };
      const mockUser = { user_id: 1, ...registerDto };

      (mockAuthService.register as jest.Mock).mockResolvedValue(mockUser);

      const result = await controller.register(registerDto);

      expect(mockAuthService.register).toHaveBeenCalledWith(registerDto);
      expect(result).toEqual(mockUser);
    });
  });
});

@@ -1,21 +1,65 @@
|
||||
import { Controller, Post, Body, UnauthorizedException } from '@nestjs/common';
|
||||
import { Throttle } from '@nestjs/throttler'; // <--- ✅ เพิ่มบรรทัดนี้ครับ
|
||||
import { AuthService } from './auth.service.js';
|
||||
import { LoginDto } from './dto/login.dto.js'; // <--- Import DTO
|
||||
import { RegisterDto } from './dto/register.dto.js'; // <--- Import DTO
|
||||
// File: src/common/auth/auth.controller.ts
|
||||
// บันทึกการแก้ไข: เพิ่ม Type ให้ req และแก้ไข Import (Fix TS7006)
|
||||
|
||||
import {
|
||||
Controller,
|
||||
Post,
|
||||
Body,
|
||||
Get,
|
||||
UseGuards,
|
||||
UnauthorizedException,
|
||||
Req,
|
||||
HttpCode,
|
||||
HttpStatus,
|
||||
Delete,
|
||||
Param,
|
||||
} from '@nestjs/common';
|
||||
import { Throttle } from '@nestjs/throttler';
|
||||
import { AuthService } from './auth.service';
|
||||
import { LoginDto } from './dto/login.dto';
|
||||
import { RegisterDto } from './dto/register.dto';
|
||||
import { JwtAuthGuard } from '../guards/jwt-auth.guard';
|
||||
import { JwtRefreshGuard } from '../guards/jwt-refresh.guard';
|
||||
import {
|
||||
ApiTags,
|
||||
ApiOperation,
|
||||
ApiBearerAuth,
|
||||
ApiResponse,
|
||||
ApiBody,
|
||||
} from '@nestjs/swagger';
|
||||
import { Request } from 'express';
|
||||
|
||||
// สร้าง Interface สำหรับ Request ที่มี User
|
||||
interface RequestWithUser extends Request {
|
||||
user: any;
|
||||
}
|
||||
|
||||
@ApiTags('Authentication')
|
||||
@Controller('auth')
|
||||
export class AuthController {
|
||||
constructor(private authService: AuthService) {}
|
||||
|
||||
@Post('login')
|
||||
// เพิ่มความเข้มงวดให้ Login (กัน Brute Force)
|
||||
@Throttle({ default: { limit: 10, ttl: 60000 } }) // 🔒 ให้ลองได้แค่ 5 ครั้ง ใน 1 นาที
|
||||
// เปลี่ยน @Body() req เป็น @Body() loginDto: LoginDto
|
||||
@Throttle({ default: { limit: 5, ttl: 60000 } })
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ApiOperation({ summary: 'Login to get Access & Refresh Token' })
|
||||
@ApiBody({ type: LoginDto })
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: 'Login successful',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
access_token: { type: 'string' },
|
||||
refresh_token: { type: 'string' },
|
||||
user: { type: 'object' },
|
||||
},
|
||||
},
|
||||
})
|
||||
async login(@Body() loginDto: LoginDto) {
|
||||
const user = await this.authService.validateUser(
|
||||
loginDto.username,
|
||||
loginDto.password,
|
||||
loginDto.password
|
||||
);
|
||||
|
||||
if (!user) {
|
||||
@@ -26,15 +70,84 @@ export class AuthController {
|
||||
}
|
||||
|
||||
@Post('register-admin')
|
||||
// เปลี่ยน @Body() req เป็น @Body() registerDto: RegisterDto
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({ summary: 'Create new user (Admin Only)' })
|
||||
@ApiBody({ type: RegisterDto })
|
||||
@ApiResponse({ status: 201, description: 'User registered' })
|
||||
async register(@Body() registerDto: RegisterDto) {
|
||||
return this.authService.register(registerDto);
|
||||
}
|
||||
/*ตัวอย่าง: ยกเว้นการนับ (เช่น Health Check)
|
||||
import { SkipThrottle } from '@nestjs/throttler';
|
||||
|
||||
@SkipThrottle()
|
||||
@Get('health')
|
||||
check() { ... }
|
||||
*/
|
||||
@UseGuards(JwtRefreshGuard)
|
||||
@Post('refresh')
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({ summary: 'Refresh Access Token using Refresh Token' })
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: 'Token refreshed',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
access_token: { type: 'string' },
|
||||
refresh_token: { type: 'string' },
|
||||
},
|
||||
},
|
||||
})
|
||||
async refresh(@Req() req: RequestWithUser) {
|
||||
return this.authService.refreshToken(req.user.sub, req.user.refreshToken);
|
||||
}
|
||||
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@Post('logout')
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({ summary: 'Logout (Revoke Tokens)' })
|
||||
@ApiResponse({
|
||||
status: 200,
|
||||
description: 'Logged out successfully',
|
||||
schema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
message: { type: 'string', example: 'Logged out successfully' },
|
||||
},
|
||||
},
|
||||
})
|
||||
async logout(@Req() req: RequestWithUser) {
|
||||
const token = req.headers.authorization?.split(' ')[1];
|
||||
if (!token) {
|
||||
return { message: 'No token provided' };
|
||||
}
|
||||
// ส่ง refresh token ไปด้วยถ้ามี (ใน header หรือ body)
|
||||
// สำหรับตอนนี้ส่งแค่ access token ไป blacklist
|
||||
return this.authService.logout(req.user.sub, token);
|
||||
}
|
||||
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@Get('profile')
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({ summary: 'Get current user profile' })
|
||||
@ApiResponse({ status: 200, description: 'User profile' })
|
||||
getProfile(@Req() req: RequestWithUser) {
|
||||
return req.user;
|
||||
}
|
||||
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@Get('sessions')
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({ summary: 'Get active sessions' })
|
||||
@ApiResponse({ status: 200, description: 'List of active sessions' })
|
||||
async getSessions() {
|
||||
return this.authService.getActiveSessions();
|
||||
}
|
||||
|
||||
@UseGuards(JwtAuthGuard)
|
||||
@Delete('sessions/:id')
|
||||
@ApiBearerAuth()
|
||||
@ApiOperation({ summary: 'Revoke session' })
|
||||
@ApiResponse({ status: 200, description: 'Session revoked' })
|
||||
async revokeSession(@Param('id') id: string) {
|
||||
return this.authService.revokeSession(parseInt(id));
|
||||
}
|
||||
}
|
||||
|
||||
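The endpoints above imply the following client-side token flow. This is a minimal sketch: the base URL and error handling are illustrative, while the routes, payloads, and Bearer conventions come from the controller itself (the refresh token is the Bearer credential for `/auth/refresh`, the access token for `/auth/logout`):

```typescript
// Hypothetical client; assumes the API is reachable at `${baseUrl}/auth/*`.
async function demoTokenFlow(baseUrl: string) {
  // 1. Login: receive both tokens
  const loginRes = await fetch(`${baseUrl}/auth/login`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      username: 'admin@np-dms.work', // example credentials from LoginDto
      password: 'password123',
    }),
  });
  const { access_token, refresh_token } = await loginRes.json();

  // 2. Refresh: send the REFRESH token as the Bearer credential
  const refreshRes = await fetch(`${baseUrl}/auth/refresh`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${refresh_token}` },
  });
  const rotated = await refreshRes.json(); // the old refresh token is now revoked

  // 3. Logout: blacklist the current ACCESS token
  await fetch(`${baseUrl}/auth/logout`, {
    method: 'POST',
    headers: { Authorization: `Bearer ${rotated.access_token}` },
  });
}
```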
@@ -1,31 +1,43 @@
// File: src/common/auth/auth.module.ts
// Change log: fix the expiresIn type mismatch (Fix TS2322)
// [P0-1] Add CASL RBAC integration
// [P2-2] Register RefreshToken entity

import { Module } from '@nestjs/common';
import { JwtModule } from '@nestjs/jwt';
import { PassportModule } from '@nestjs/passport';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AuthService } from './auth.service.js';
import { AuthController } from './auth.controller.js';
import { UserModule } from '../../modules/user/user.module.js';
import { JwtStrategy } from './strategies/jwt.strategy.js';
import { JwtRefreshStrategy } from './strategies/jwt-refresh.strategy.js';
import { User } from '../../modules/user/entities/user.entity';
import { RefreshToken } from './entities/refresh-token.entity'; // [P2-2]
import { CaslModule } from './casl/casl.module';
import { PermissionsGuard } from './guards/permissions.guard';

@Module({
  imports: [
    TypeOrmModule.forFeature([User, RefreshToken]), // [P2-2] added RefreshToken
    UserModule,
    PassportModule,
    JwtModule.registerAsync({
      imports: [ConfigModule],
      inject: [ConfigService],
      useFactory: (configService: ConfigService) => ({
        secret: configService.get<string>('JWT_SECRET'),
        signOptions: {
          // Cast to any to work around a type mismatch with the library
          expiresIn: (configService.get<string>('JWT_EXPIRATION') ||
            '15m') as any,
        },
      }),
    }),
    CaslModule,
  ],
  providers: [AuthService, JwtStrategy, JwtRefreshStrategy, PermissionsGuard],
  controllers: [AuthController],
  exports: [AuthService, PermissionsGuard],
})
export class AuthModule {}

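The `JwtRefreshStrategy` registered above is not part of this diff. A plausible sketch, assuming the standard passport-jwt pattern and matching how the controller reads `req.user.sub` and `req.user.refreshToken` (the strategy name, file path, and internals are assumptions):

```typescript
import { Injectable } from '@nestjs/common';
import { PassportStrategy } from '@nestjs/passport';
import { ExtractJwt, Strategy } from 'passport-jwt';
import { ConfigService } from '@nestjs/config';
import { Request } from 'express';

@Injectable()
export class JwtRefreshStrategy extends PassportStrategy(Strategy, 'jwt-refresh') {
  constructor(configService: ConfigService) {
    super({
      jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
      secretOrKey: configService.get<string>('JWT_REFRESH_SECRET')!,
      passReqToCallback: true, // the raw token is needed to verify it against the DB hash
    });
  }

  validate(req: Request, payload: { sub: number; username: string }) {
    // Expose the raw refresh token so AuthService.refreshToken() can hash and match it
    const refreshToken = req.headers.authorization?.split(' ')[1];
    return { ...payload, refreshToken };
  }
}
```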
@@ -1,18 +1,201 @@
import { Test, TestingModule } from '@nestjs/testing';
import { AuthService } from './auth.service';
import { UserService } from '../../modules/user/user.service';
import { JwtService } from '@nestjs/jwt';
import { ConfigService } from '@nestjs/config';
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { getRepositoryToken } from '@nestjs/typeorm';
import { User } from '../../modules/user/entities/user.entity';
import { RefreshToken } from './entities/refresh-token.entity';
import { Repository } from 'typeorm';
import { UnauthorizedException } from '@nestjs/common';

// Mock bcrypt at top level
jest.mock('bcrypt', () => ({
  compare: jest.fn(),
  hash: jest.fn().mockResolvedValue('hashedpassword'),
  genSalt: jest.fn().mockResolvedValue('salt'),
}));

// eslint-disable-next-line @typescript-eslint/no-require-imports
const bcrypt = require('bcrypt');

describe('AuthService', () => {
  let service: AuthService;
  let userService: UserService;
  let jwtService: JwtService;
  let tokenRepo: Repository<RefreshToken>;

  const mockUser = {
    user_id: 1,
    username: 'testuser',
    password: 'hashedpassword',
    primaryOrganizationId: 1,
  };

  const mockQueryBuilder = {
    addSelect: jest.fn().mockReturnThis(),
    where: jest.fn().mockReturnThis(),
    getOne: jest.fn().mockResolvedValue(mockUser),
  };

  const mockUserRepo = {
    createQueryBuilder: jest.fn().mockReturnValue(mockQueryBuilder),
  };

  const mockTokenRepo = {
    create: jest.fn(),
    save: jest.fn(),
    findOne: jest.fn(),
    update: jest.fn(),
  };

  beforeEach(async () => {
    // Reset bcrypt mocks
    bcrypt.compare.mockResolvedValue(true);

    const module: TestingModule = await Test.createTestingModule({
      providers: [
        AuthService,
        {
          provide: UserService,
          useValue: {
            findOneByUsername: jest.fn(),
            create: jest.fn(),
            findOne: jest.fn(),
          },
        },
        {
          provide: JwtService,
          useValue: {
            signAsync: jest.fn().mockResolvedValue('jwt_token'),
            decode: jest.fn(),
          },
        },
        {
          provide: ConfigService,
          useValue: {
            get: jest.fn().mockImplementation((key: string) => {
              if (key.includes('EXPIRATION')) return '1h';
              return 'secret';
            }),
          },
        },
        {
          provide: CACHE_MANAGER,
          useValue: {
            set: jest.fn(),
          },
        },
        {
          provide: getRepositoryToken(User),
          useValue: mockUserRepo,
        },
        {
          provide: getRepositoryToken(RefreshToken),
          useValue: mockTokenRepo,
        },
      ],
    }).compile();

    service = module.get<AuthService>(AuthService);
    userService = module.get<UserService>(UserService);
    jwtService = module.get<JwtService>(JwtService);
    tokenRepo = module.get(getRepositoryToken(RefreshToken));
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  it('should be defined', () => {
    expect(service).toBeDefined();
  });

  describe('validateUser', () => {
    it('should return user without password if validation succeeds', async () => {
      const result = await service.validateUser('testuser', 'password');
      expect(result).toBeDefined();
      expect(result).not.toHaveProperty('password');
      expect(result.username).toBe('testuser');
    });

    it('should return null if user not found', async () => {
      mockQueryBuilder.getOne.mockResolvedValueOnce(null);
      const result = await service.validateUser('unknown', 'password');
      expect(result).toBeNull();
    });

    it('should return null if password mismatch', async () => {
      bcrypt.compare.mockResolvedValueOnce(false);
      const result = await service.validateUser('testuser', 'wrongpassword');
      expect(result).toBeNull();
    });
  });

  describe('login', () => {
    it('should return access and refresh tokens', async () => {
      mockTokenRepo.create.mockReturnValue({ id: 1 });
      mockTokenRepo.save.mockResolvedValue({ id: 1 });

      const result = await service.login(mockUser);

      expect(result).toHaveProperty('access_token');
      expect(result).toHaveProperty('refresh_token');
      expect(mockTokenRepo.save).toHaveBeenCalled();
    });
  });

  describe('register', () => {
    it('should register a new user', async () => {
      (userService.findOneByUsername as jest.Mock).mockResolvedValue(null);
      (userService.create as jest.Mock).mockResolvedValue(mockUser);

      const dto = {
        username: 'newuser',
        password: 'password',
        email: 'test@example.com',
        firstName: 'Test',
        lastName: 'User',
      };

      const result = await service.register(dto);
      expect(result).toBeDefined();
      expect(userService.create).toHaveBeenCalled();
    });
  });

  describe('refreshToken', () => {
    it('should return new tokens if valid', async () => {
      const mockStoredToken = {
        tokenHash: 'somehash',
        isRevoked: false,
        expiresAt: new Date(Date.now() + 10000),
      };
      mockTokenRepo.findOne.mockResolvedValue(mockStoredToken);
      (userService.findOne as jest.Mock).mockResolvedValue(mockUser);

      const result = await service.refreshToken(1, 'valid_refresh_token');

      expect(result.access_token).toBeDefined();
      expect(result.refresh_token).toBeDefined();
      // Should mark the old token as revoked
      expect(mockTokenRepo.save).toHaveBeenCalledWith(
        expect.objectContaining({ isRevoked: true })
      );
    });

    it('should throw UnauthorizedException if token revoked', async () => {
      const mockStoredToken = {
        tokenHash: 'somehash',
        isRevoked: true,
        expiresAt: new Date(Date.now() + 10000),
      };
      mockTokenRepo.findOne.mockResolvedValue(mockStoredToken);

      await expect(service.refreshToken(1, 'revoked_token')).rejects.toThrow(
        UnauthorizedException
      );
    });
  });
});

@@ -1,19 +1,59 @@
// File: src/common/auth/auth.service.ts
// Change log:
// 1. Fix the type mismatch in signAsync
// 2. Fix validateUser to also select the password hash (fix HTTP 500: "data and hash arguments required")
// 3. [P2-2] Implement refresh token storage & rotation

import {
  Injectable,
  UnauthorizedException,
  Inject,
  BadRequestException,
} from '@nestjs/common';
import { JwtService } from '@nestjs/jwt';
import { ConfigService } from '@nestjs/config';
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import type { Cache } from 'cache-manager';
import * as bcrypt from 'bcrypt';
import * as crypto from 'crypto';

import { UserService } from '../../modules/user/user.service';
import { User } from '../../modules/user/entities/user.entity';
import { RegisterDto } from './dto/register.dto';
import { RefreshToken } from './entities/refresh-token.entity'; // [P2-2]

@Injectable()
export class AuthService {
  constructor(
    private userService: UserService,
    private jwtService: JwtService,
    private configService: ConfigService,
    @Inject(CACHE_MANAGER) private cacheManager: Cache,
    @InjectRepository(User)
    private usersRepository: Repository<User>,
    // [P2-2] Inject RefreshToken repository
    @InjectRepository(RefreshToken)
    private refreshTokenRepository: Repository<RefreshToken>
  ) {}

  // 1. Validate username/password
  async validateUser(username: string, pass: string): Promise<any> {
    console.log(`🔍 Checking login for: ${username}`);
    const user = await this.usersRepository
      .createQueryBuilder('user')
      .addSelect('user.password')
      .where('user.username = :username', { username })
      .getOne();

    if (!user) {
      console.log('❌ User not found in database');
      return null;
    }

    // Ensure the user exists and actually has a password hash
    if (user && user.password && (await bcrypt.compare(pass, user.password))) {
      // eslint-disable-next-line @typescript-eslint/no-unused-vars
      const { password, ...result } = user;
      return result;
@@ -21,21 +61,212 @@ export class AuthService {
    return null;
  }

  // 2. Login: issue access & refresh tokens and persist the refresh token
  async login(user: any) {
    const payload = {
      username: user.username,
      sub: user.user_id,
      scope: 'Global',
    };

    const accessToken = await this.jwtService.signAsync(payload, {
      secret: this.configService.get<string>('JWT_SECRET'),
      expiresIn: (this.configService.get<string>('JWT_EXPIRATION') ||
        '15m') as any,
    });

    const refreshToken = await this.jwtService.signAsync(payload, {
      secret: this.configService.get<string>('JWT_REFRESH_SECRET'),
      expiresIn: (this.configService.get<string>('JWT_REFRESH_EXPIRATION') ||
        '7d') as any,
    });

    // [P2-2] Store the refresh token in the DB
    await this.storeRefreshToken(user.user_id, refreshToken);

    return {
      access_token: accessToken,
      refresh_token: refreshToken,
      user: user,
    };
  }

  // [P2-2] Store refresh token logic
  private async storeRefreshToken(userId: number, token: string) {
    // Hash the token before storing it, for security
    const hash = crypto.createHash('sha256').update(token).digest('hex');
    const expiresInDays = 7; // Should match JWT_REFRESH_EXPIRATION
    const expiresAt = new Date();
    expiresAt.setDate(expiresAt.getDate() + expiresInDays);

    const refreshTokenEntity = this.refreshTokenRepository.create({
      userId,
      tokenHash: hash,
      expiresAt,
      isRevoked: false,
    });

    await this.refreshTokenRepository.save(refreshTokenEntity);
  }

  // 3. Register (admin only)
  async register(userDto: RegisterDto) {
    const existingUser = await this.userService.findOneByUsername(
      userDto.username
    );
    if (existingUser) {
      throw new BadRequestException('Username already exists');
    }

    const salt = await bcrypt.genSalt();
    const hashedPassword = await bcrypt.hash(userDto.password, salt);

    // Use the values from the already-validated DTO
    return this.userService.create({
      ...userDto,
      password: hashedPassword,
    });
  }

  // 4. Refresh token: verify and issue new tokens (rotation)
  async refreshToken(userId: number, refreshToken: string) {
    // Hash the incoming token to match it against the DB
    const hash = crypto.createHash('sha256').update(refreshToken).digest('hex');

    // Find the token in the DB
    const storedToken = await this.refreshTokenRepository.findOne({
      where: { tokenHash: hash },
    });

    if (!storedToken) {
      throw new UnauthorizedException('Invalid refresh token');
    }

    if (storedToken.isRevoked) {
      // Possible token theft! Invalidate the user's whole token family
      await this.revokeAllUserTokens(userId);
      throw new UnauthorizedException('Refresh token revoked - Security alert');
    }

    if (storedToken.expiresAt < new Date()) {
      throw new UnauthorizedException('Refresh token expired');
    }

    // Valid token -> rotate it
    const user = await this.userService.findOne(userId);
    if (!user) throw new UnauthorizedException('User not found');

    const payload = { username: user.username, sub: user.user_id };

    // Generate NEW tokens
    const newAccessToken = await this.jwtService.signAsync(payload, {
      secret: this.configService.get<string>('JWT_SECRET'),
      expiresIn: (this.configService.get<string>('JWT_EXPIRATION') ||
        '15m') as any,
    });

    const newRefreshToken = await this.jwtService.signAsync(payload, {
      secret: this.configService.get<string>('JWT_REFRESH_SECRET'),
      expiresIn: (this.configService.get<string>('JWT_REFRESH_EXPIRATION') ||
        '7d') as any,
    });

    // Revoke the OLD token and point it at the NEW one
    const newHash = crypto
      .createHash('sha256')
      .update(newRefreshToken)
      .digest('hex');

    storedToken.isRevoked = true;
    storedToken.replacedByToken = newHash;
    await this.refreshTokenRepository.save(storedToken);

    // Save the NEW token
    await this.storeRefreshToken(userId, newRefreshToken);

    return {
      access_token: newAccessToken,
      refresh_token: newRefreshToken,
    };
  }

  // [P2-2] Helper: revoke all tokens for a user (security measure)
  private async revokeAllUserTokens(userId: number) {
    await this.refreshTokenRepository.update(
      { userId, isRevoked: false },
      { isRevoked: true }
    );
  }

  // 5. Logout: revoke the current refresh token & blacklist the access token
  async logout(userId: number, accessToken: string, refreshToken?: string) {
    // Blacklist the access token
    try {
      const decoded = this.jwtService.decode(accessToken);
      if (decoded && decoded.exp) {
        const ttl = decoded.exp - Math.floor(Date.now() / 1000);
        if (ttl > 0) {
          await this.cacheManager.set(
            `blacklist:token:${accessToken}`,
            true,
            ttl * 1000
          );
        }
      }
    } catch (error) {
      // Ignore decoding errors
    }

    // [P2-2] Revoke the refresh token if provided
    if (refreshToken) {
      const hash = crypto
        .createHash('sha256')
        .update(refreshToken)
        .digest('hex');
      await this.refreshTokenRepository.update(
        { tokenHash: hash },
        { isRevoked: true }
      );
    }

    return { message: 'Logged out successfully' };
  }

  // [New] Get active sessions
  async getActiveSessions() {
    // Only return tokens that are NOT revoked and NOT expired
    const activeTokens = await this.refreshTokenRepository.find({
      where: {
        isRevoked: false,
      },
      relations: ['user'], // requires the RefreshToken entity to declare a 'user' relation
      order: { createdAt: 'DESC' },
    });

    const now = new Date();
    // Filter expired tokens in memory
    return activeTokens
      .filter((t) => new Date(t.expiresAt) > now)
      .map((t) => ({
        id: t.tokenId.toString(),
        userId: t.userId,
        user: {
          username: t.user?.username || 'Unknown',
          firstName: t.user?.firstName || '',
          lastName: t.user?.lastName || '',
        },
        deviceName: 'Unknown Device', // not stored in the DB
        ipAddress: 'Unknown IP', // not stored in the DB
        lastActive: t.createdAt.toISOString(), // best approximation
        isCurrent: false, // cannot be determined without the current session context
      }));
  }

  // [New] Revoke a session by ID
  async revokeSession(sessionId: number) {
    return this.refreshTokenRepository.update(
      { tokenId: sessionId },
      { isRevoked: true }
    );
  }
}

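The blacklist written by `logout()` only takes effect if something checks it on each authenticated request. That check is not part of this diff; a minimal sketch of one way `JwtAuthGuard` might consult the same cache key (the guard's internals here are an assumption, only the key format comes from `AuthService` above):

```typescript
import { ExecutionContext, Inject, Injectable, UnauthorizedException } from '@nestjs/common';
import { AuthGuard } from '@nestjs/passport';
import { CACHE_MANAGER } from '@nestjs/cache-manager';
import type { Cache } from 'cache-manager';

@Injectable()
export class JwtAuthGuard extends AuthGuard('jwt') {
  constructor(@Inject(CACHE_MANAGER) private cacheManager: Cache) {
    super();
  }

  async canActivate(context: ExecutionContext): Promise<boolean> {
    const request = context.switchToHttp().getRequest();
    const token = request.headers.authorization?.split(' ')[1];

    // Reject tokens that logout() has blacklisted (same key format as AuthService)
    if (token && (await this.cacheManager.get(`blacklist:token:${token}`))) {
      throw new UnauthorizedException('Token has been revoked');
    }

    return super.canActivate(context) as Promise<boolean>;
  }
}
```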
131 backend/src/common/auth/casl/README.md Normal file
@@ -0,0 +1,131 @@
# P0-1: CASL RBAC Integration - Usage Example

## Usage Examples in a Controller

### 1. Import Required Dependencies

```typescript
import { Controller, Post, Get, Put, Delete, UseGuards, Body, Param } from '@nestjs/common';
import { JwtAuthGuard } from '../common/auth/guards/jwt-auth.guard';
import { PermissionsGuard } from '../common/auth/guards/permissions.guard';
import { RequirePermission } from '../common/decorators/require-permission.decorator';
```

### 2. Apply Guards and Permissions

```typescript
@Controller('correspondences')
@UseGuards(JwtAuthGuard) // Step 1: Authenticate the user
export class CorrespondenceController {

  // Example 1: single permission
  @Post()
  @UseGuards(PermissionsGuard) // Step 2: Check permissions
  @RequirePermission('correspondence.create')
  async create(@Body() dto: CreateCorrespondenceDto) {
    // Only users with the 'correspondence.create' permission can access this
    return this.correspondenceService.create(dto);
  }

  // Example 2: view (typically everyone with access)
  @Get(':id')
  @UseGuards(PermissionsGuard)
  @RequirePermission('correspondence.view')
  async findOne(@Param('id') id: string) {
    return this.correspondenceService.findOne(+id);
  }

  // Example 3: admin edit (requires a special permission)
  @Put(':id/force-update')
  @UseGuards(PermissionsGuard)
  @RequirePermission('document.admin_edit')
  async forceUpdate(@Param('id') id: string, @Body() dto: UpdateDto) {
    // Only document controllers can force-update
    return this.correspondenceService.forceUpdate(+id, dto);
  }

  // Example 4: multiple permissions (the user must have ALL of them)
  @Delete(':id')
  @UseGuards(PermissionsGuard)
  @RequirePermission('correspondence.delete', 'document.admin_edit')
  async remove(@Param('id') id: string) {
    // Requires BOTH permissions
    return this.correspondenceService.remove(+id);
  }
}
```

### 3. Controller with Scope Context

The permissions guard extracts the scope from request params/body/query:

```typescript
@Controller('projects/:projectId/correspondences')
@UseGuards(JwtAuthGuard)
export class ProjectCorrespondenceController {

  @Post()
  @UseGuards(PermissionsGuard)
  @RequirePermission('correspondence.create')
  async create(
    @Param('projectId') projectId: string,
    @Body() dto: CreateCorrespondenceDto
  ) {
    // PermissionsGuard extracts { projectId } and checks
    // whether the user holds the permission within this project
    return this.service.create({ projectId, ...dto });
  }
}
```

## How It Works

### Scope Matching Hierarchy

1. **Global Scope**: a user with an assignment that specifies no org/project/contract
   - can access everything

2. **Organization Scope**: a user with an organization-level assignment
   - can only access resources within that organization

3. **Project Scope**: a user with a project-level assignment
   - can only access resources within that project

4. **Contract Scope**: a user with a contract-level assignment
   - can only access resources within that contract

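For instance, with the `AbilityFactory` shown later in this diff, a user whose only assignment is scoped to contract 5 passes the check in that contract and fails anywhere else. A sketch, reusing the mock helpers from `ability.factory.spec.ts` (values are illustrative; `factory` is an `AbilityFactory` instance):

```typescript
const user = createMockUser({
  assignments: [
    createMockAssignment({ contractId: 5, permissionNames: ['drawing.create'] }),
  ],
});

factory.createForUser(user, { contractId: 5 }).can('create', 'drawing');  // true: contract matches
factory.createForUser(user, { contractId: 6 }).can('create', 'drawing');  // false: wrong contract
factory.createForUser(user, { projectId: 10 }).can('create', 'drawing');  // false: different scope level
```
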
### Permission Format

Permissions in the database must follow the format `{subject}.{action}`.

Examples:
- `correspondence.create`
- `correspondence.view`
- `correspondence.edit`
- `document.admin_edit`
- `rfa.create`
- `project.manage_members`
- `system.manage_all` (special case)

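Each name maps onto a CASL rule via `AbilityFactory.parsePermission()` (shown later in this diff), which splits the name at the dot and reverses it into an `[action, subject]` pair. A mirror of that private method, for illustration:

```typescript
function parsePermission(name: string): [string, string] {
  if (name === 'system.manage_all') return ['manage', 'all']; // special case
  const parts = name.split('.');
  if (parts.length !== 2) throw new Error(`Invalid permission format: ${name}`);
  const [subject, action] = parts;
  return [action, subject]; // "{subject}.{action}" → [action, subject]
}

parsePermission('correspondence.create'); // ['create', 'correspondence']
parsePermission('document.admin_edit');   // ['admin_edit', 'document']
```
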
## Testing

Run the unit tests:
```bash
npm run test -- ability.factory.spec
```

Expected output:
```
✓ should grant all permissions for global admin
✓ should grant permissions for matching organization
✓ should deny permissions for non-matching organization
✓ should grant permissions for matching project
✓ should grant permissions for matching contract
✓ should combine permissions from multiple assignments
```

## Next Steps

1. Update existing controllers to use `@RequirePermission()`
2. Test with different user roles
3. Verify scope matching works correctly
164 backend/src/common/auth/casl/ability.factory.spec.ts Normal file
@@ -0,0 +1,164 @@
import { Test, TestingModule } from '@nestjs/testing';
import { AbilityFactory, ScopeContext } from './ability.factory';
import { User } from '../../../modules/user/entities/user.entity';
import { UserAssignment } from '../../../modules/user/entities/user-assignment.entity';

describe('AbilityFactory', () => {
  let factory: AbilityFactory;

  beforeEach(async () => {
    const module: TestingModule = await Test.createTestingModule({
      providers: [AbilityFactory],
    }).compile();

    factory = module.get<AbilityFactory>(AbilityFactory);
  });

  it('should be defined', () => {
    expect(factory).toBeDefined();
  });

  describe('Global Admin', () => {
    it('should grant all permissions for global admin', () => {
      const user = createMockUser({
        assignments: [
          createMockAssignment({
            organizationId: undefined,
            projectId: undefined,
            contractId: undefined,
            permissionNames: ['system.manage_all'],
          }),
        ],
      });

      const ability = factory.createForUser(user, {});

      expect(ability.can('manage', 'all')).toBe(true);
    });
  });

  describe('Organization Level', () => {
    it('should grant permissions for matching organization', () => {
      const user = createMockUser({
        assignments: [
          createMockAssignment({
            organizationId: 1,
            permissionNames: ['correspondence.create', 'correspondence.read'],
          }),
        ],
      });

      const context: ScopeContext = { organizationId: 1 };
      const ability = factory.createForUser(user, context);

      expect(ability.can('create', 'correspondence')).toBe(true);
      expect(ability.can('read', 'correspondence')).toBe(true);
    });

    it('should deny permissions for non-matching organization', () => {
      const user = createMockUser({
        assignments: [
          createMockAssignment({
            organizationId: 1,
            permissionNames: ['correspondence.create'],
          }),
        ],
      });

      const context: ScopeContext = { organizationId: 2 };
      const ability = factory.createForUser(user, context);

      expect(ability.can('create', 'correspondence')).toBe(false);
    });
  });

  describe('Project Level', () => {
    it('should grant permissions for matching project', () => {
      const user = createMockUser({
        assignments: [
          createMockAssignment({
            projectId: 10,
            permissionNames: ['rfa.create'],
          }),
        ],
      });

      const context: ScopeContext = { projectId: 10 };
      const ability = factory.createForUser(user, context);

      expect(ability.can('create', 'rfa')).toBe(true);
    });
  });

  describe('Contract Level', () => {
    it('should grant permissions for matching contract', () => {
      const user = createMockUser({
        assignments: [
          createMockAssignment({
            contractId: 5,
            permissionNames: ['drawing.create'],
          }),
        ],
      });

      const context: ScopeContext = { contractId: 5 };
      const ability = factory.createForUser(user, context);

      expect(ability.can('create', 'drawing')).toBe(true);
    });
  });

  describe('Multiple Assignments', () => {
    it('should combine permissions from multiple assignments', () => {
      const user = createMockUser({
        assignments: [
          createMockAssignment({
            organizationId: 1,
            permissionNames: ['correspondence.create'],
          }),
          createMockAssignment({
            projectId: 10,
            permissionNames: ['rfa.create'],
          }),
        ],
      });

      const orgAbility = factory.createForUser(user, { organizationId: 1 });
      expect(orgAbility.can('create', 'correspondence')).toBe(true);

      const projectAbility = factory.createForUser(user, { projectId: 10 });
      expect(projectAbility.can('create', 'rfa')).toBe(true);
    });
  });
});

// Helper functions using mock objects
function createMockUser(props: { assignments: UserAssignment[] }): User {
  const user = new User();
  user.user_id = 1;
  user.username = 'testuser';
  user.email = 'test@example.com';
  user.assignments = props.assignments;
  return user;
}

function createMockAssignment(props: {
  organizationId?: number;
  projectId?: number;
  contractId?: number;
  permissionNames: string[];
}): UserAssignment {
  const assignment = new UserAssignment();
  assignment.organizationId = props.organizationId;
  assignment.projectId = props.projectId;
  assignment.contractId = props.contractId;

  // Create a mock role with permissions
  assignment.role = {
    permissions: props.permissionNames.map((name) => ({
      permissionName: name,
    })),
  } as any;

  return assignment;
}
133 backend/src/common/auth/casl/ability.factory.ts Normal file
@@ -0,0 +1,133 @@
import { Injectable } from '@nestjs/common';
import { Ability, AbilityBuilder, AbilityClass } from '@casl/ability';
import { User } from '../../../modules/user/entities/user.entity';
import { UserAssignment } from '../../../modules/user/entities/user-assignment.entity';

// Define action types
type Actions = 'create' | 'read' | 'update' | 'delete' | 'manage';

// Define subject types (resources)
type Subjects =
  | 'correspondence'
  | 'rfa'
  | 'drawing'
  | 'transmittal'
  | 'circulation'
  | 'project'
  | 'organization'
  | 'user'
  | 'role'
  | 'workflow'
  | 'all';

export type AppAbility = Ability<[Actions, Subjects]>;

export interface ScopeContext {
  organizationId?: number;
  projectId?: number;
  contractId?: number;
}

@Injectable()
export class AbilityFactory {
  /**
   * Build an Ability object for a user in the given context.
   * Supports 4-level hierarchical RBAC:
   * - Level 1: Global (no scope)
   * - Level 2: Organization
   * - Level 3: Project
   * - Level 4: Contract
   */
  createForUser(user: User, context: ScopeContext): AppAbility {
    const { can, build } = new AbilityBuilder<AppAbility>(
      Ability as AbilityClass<AppAbility>
    );

    if (!user || !user.assignments) {
      // No permissions for unauthenticated or incomplete users
      return build();
    }

    // Iterate through the user's role assignments
    user.assignments.forEach((assignment: UserAssignment) => {
      // Check whether the assignment matches the current context
      if (this.matchesScope(assignment, context)) {
        // Grant the permissions from the role
        assignment.role.permissions?.forEach((permission) => {
          const [action, subject] = this.parsePermission(
            permission.permissionName
          );
          can(action as Actions, subject as Subjects);
        });
      }
    });

    return build({
      // Detect subject type (for future use with objects)
      detectSubjectType: (item: any) => {
        if (typeof item === 'string') return item;
        return item.constructor;
      },
    });
  }

  /**
   * Check whether an assignment matches the scope context.
   * Hierarchical matching:
   * - A global assignment matches all contexts
   * - An organization assignment matches if the org IDs match
   * - A project assignment matches if the project IDs match
   * - A contract assignment matches if the contract IDs match
   */
  private matchesScope(
    assignment: UserAssignment,
    context: ScopeContext
  ): boolean {
    // Level 1: Global scope (no organizationId, projectId, contractId)
    if (
      !assignment.organizationId &&
      !assignment.projectId &&
      !assignment.contractId
    ) {
      return true; // Global admin can access everything
    }

    // Level 4: Contract scope (most specific)
    if (assignment.contractId) {
      return context.contractId === assignment.contractId;
    }

    // Level 3: Project scope
    if (assignment.projectId) {
      return context.projectId === assignment.projectId;
    }

    // Level 2: Organization scope
    if (assignment.organizationId) {
      return context.organizationId === assignment.organizationId;
    }

    return false;
  }

  /**
   * Parse a permission name into [action, subject].
   * Format: "correspondence.create" → ["create", "correspondence"]
   *         "project.view" → ["view", "project"]
   */
  private parsePermission(permissionName: string): [string, string] {
    // Special case for permissions like "system.manage_all"
    if (permissionName === 'system.manage_all') {
      return ['manage', 'all'];
    }

    const parts = permissionName.split('.');
    if (parts.length === 2) {
      const [subject, action] = parts;
      return [action, subject];
    }

    throw new Error(`Invalid permission format: ${permissionName}`);
  }
}
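The `PermissionsGuard` that consumes this factory is referenced throughout the diff but not shown. A minimal sketch of how such a guard could wire `@RequirePermission()` metadata to the `AbilityFactory`, following the behavior the README describes; the metadata key and the params-only scope extraction are assumptions:

```typescript
import { CanActivate, ExecutionContext, Injectable } from '@nestjs/common';
import { Reflector } from '@nestjs/core';
import { AbilityFactory } from './ability.factory';

@Injectable()
export class PermissionsGuard implements CanActivate {
  constructor(
    private reflector: Reflector,
    private abilityFactory: AbilityFactory
  ) {}

  canActivate(context: ExecutionContext): boolean {
    // Hypothetical metadata key set by @RequirePermission()
    const required =
      this.reflector.get<string[]>('permissions', context.getHandler()) ?? [];
    if (required.length === 0) return true;

    const request = context.switchToHttp().getRequest();
    // Scope comes from the request, per the README; body/query extraction omitted here
    const scope = {
      organizationId: Number(request.params.organizationId) || undefined,
      projectId: Number(request.params.projectId) || undefined,
      contractId: Number(request.params.contractId) || undefined,
    };

    const ability = this.abilityFactory.createForUser(request.user, scope);

    // The user must hold ALL required permissions ("{subject}.{action}" format)
    return required.every((name) => {
      const [subject, action] = name.split('.');
      return ability.can(action as any, subject as any);
    });
  }
}
```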
8 backend/src/common/auth/casl/casl.module.ts Normal file
@@ -0,0 +1,8 @@
import { Module } from '@nestjs/common';
import { AbilityFactory } from './ability.factory';

@Module({
  providers: [AbilityFactory],
  exports: [AbilityFactory],
})
export class CaslModule {}
@@ -1,10 +1,16 @@
import { IsNotEmpty, IsString } from 'class-validator';
import { ApiProperty } from '@nestjs/swagger';

export class LoginDto {
  @ApiProperty({
    description: 'Username (Email)',
    example: 'admin@np-dms.work',
  })
  @IsString()
  @IsNotEmpty()
  username!: string;

  @ApiProperty({ description: 'Password', example: 'password123' })
  @IsString()
  @IsNotEmpty()
  password!: string;
Some files were not shown because too many files have changed in this diff.