diff --git a/.gitea/workflows/cd.yml b/.gitea/workflows/cd.yml
new file mode 100644
index 0000000..59e5975
--- /dev/null
+++ b/.gitea/workflows/cd.yml
@@ -0,0 +1,129 @@
+name: CI / CD Pipeline
+
+on:
+ push:
+ branches: [main, develop]
+ pull_request:
+ workflow_dispatch:
+
+jobs:
+ # ============================================================
+ # JOB 1 : CI & Quality Gate
+ # ============================================================
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: ๐ฅ Checkout
+ uses: actions/checkout@v4
+
+ - name: ๐ข Setup Node
+ uses: actions/setup-node@v4
+ with:
+ node-version: 20
+
+ - name: ๐ฆ Install pnpm
+ run: npm install -g pnpm
+
+ - name: ๐ฆ Install deps
+ run: pnpm install
+
+ - name: ๐งน Lint
+ run: pnpm lint
+
+ - name: ๐ Security & quality checks
+ run: |
+ # UUID misuse check (ADR-019)
+        if grep -r --include="*.ts" --include="*.tsx" --exclude-dir=node_modules --exclude-dir=dist "parseInt(.*uuid" .; then
+ echo "โ UUID misuse detected"
+ exit 1
+ fi
+ # console.log check (Clean Code)
+        if grep -r --include="*.ts" --include="*.tsx" --exclude-dir=node_modules --exclude-dir=dist --exclude-dir=.next "console.log" .; then
+ echo "โ console.log detected"
+ exit 1
+ fi
+
+ - name: ๐งช Run Tests
+ run: |
+ cd backend && pnpm test --watchAll=false
+ cd ../frontend && pnpm test run
+
+ - name: ๐๏ธ Verify Build
+ run: |
+ cd backend && pnpm build
+ cd ../frontend && pnpm build
+
+ # ============================================================
+ # JOB 2 : Build & Push Image (Release Stage)
+ # ============================================================
+ release:
+ needs: build
+ if: github.ref == 'refs/heads/main'
+ runs-on: ubuntu-latest
+ steps:
+ - name: ๐ฅ Checkout
+ uses: actions/checkout@v4
+
+ - name: ๐ท๏ธ Get Version
+ id: pkg-version
+ run: |
+ VERSION=$(node -p "require('./backend/package.json').version")
+ echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
+ echo "Releasing version: $VERSION"
+
+ - name: ๐ Login to Internal Registry
+ run: |
+ echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login ${{ secrets.REGISTRY_URL }} -u ${{ secrets.REGISTRY_USERNAME }} --password-stdin
+
+ - name: ๐๏ธ Build & Push Backend
+ run: |
+ docker build -f backend/Dockerfile \
+ -t ${{ secrets.REGISTRY_URL }}/lcbp3-backend:${{ steps.pkg-version.outputs.VERSION }} \
+ -t ${{ secrets.REGISTRY_URL }}/lcbp3-backend:latest .
+ docker push ${{ secrets.REGISTRY_URL }}/lcbp3-backend:${{ steps.pkg-version.outputs.VERSION }}
+ docker push ${{ secrets.REGISTRY_URL }}/lcbp3-backend:latest
+
+ - name: ๐๏ธ Build & Push Frontend
+ run: |
+ docker build -f frontend/Dockerfile \
+ --build-arg NEXT_PUBLIC_API_URL=${{ secrets.NEXT_PUBLIC_API_URL }} \
+ -t ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:${{ steps.pkg-version.outputs.VERSION }} \
+ -t ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:latest .
+ docker push ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:${{ steps.pkg-version.outputs.VERSION }}
+ docker push ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:latest
+
+ # ============================================================
+ # JOB 3 : Deploy โ Trigger Blue-Green on QNAP
+ # ============================================================
+ deploy:
+ needs: release
+ if: github.ref == 'refs/heads/main'
+ runs-on: ubuntu-latest
+ steps:
+ - name: ๐ Trigger Deployment on QNAP
+ uses: appleboy/ssh-action@v1.0.3
+ with:
+ host: ${{ secrets.HOST }}
+ username: ${{ secrets.USERNAME }}
+ password: ${{ secrets.PASSWORD }}
+ port: ${{ secrets.PORT }}
+ timeout: 1200s
+ command_timeout: 900s
+          script_stop: true
+ script: |
+ set -e
+ export PATH="/share/CACHEDEV1_DATA/.qpkg/container-station/bin:/opt/bin:/usr/local/bin:/usr/bin:/bin:$PATH"
+
+ # Sync scripts first
+ echo "๐ Syncing deployment scripts..."
+ cd /share/np-dms/app/source/lcbp3
+ git fetch origin main
+ git reset --hard origin/main
+
+ # Ensure scripts are executable
+ chmod +x scripts/deploy.sh scripts/rollback.sh
+
+ echo "๐ Executing Blue-Green deployment..."
+ # Pass registry credentials if needed by the pull command in deploy.sh
+ export DB_PASSWORD="${{ secrets.DB_PASSWORD }}"
+ ./scripts/deploy.sh
diff --git a/backend/package.json b/backend/package.json
index e274a39..dd1d7a3 100644
--- a/backend/package.json
+++ b/backend/package.json
@@ -1,6 +1,6 @@
{
"name": "backend",
- "version": "1.8.0",
+ "version": "1.8.1",
"packageManager": "pnpm@10.32.1+sha512.a706938f0e89ac1456b6563eab4edf1d1faf3368d1191fc5c59790e96dc918e4456ab2e67d613de1043d2e8c81f87303e6b40d4ffeca9df15ef1ad567348f2be",
"description": "
\r
\r
",
"author": "",
diff --git a/backend/src/common/auth/auth.service.ts b/backend/src/common/auth/auth.service.ts
index a13dd5a..2ccc1f6 100644
--- a/backend/src/common/auth/auth.service.ts
+++ b/backend/src/common/auth/auth.service.ts
@@ -78,7 +78,7 @@ export class AuthService {
derivedRole = 'DC';
}
}
- const { password, ...result } = user;
+ const { password: _password, ...result } = user;
return { ...result, role: derivedRole } as User & { role: string };
}
return null;
diff --git a/backend/src/database/seeds/run-seed.ts b/backend/src/database/seeds/run-seed.ts
index 8532d9a..27102b9 100644
--- a/backend/src/database/seeds/run-seed.ts
+++ b/backend/src/database/seeds/run-seed.ts
@@ -10,14 +10,9 @@ async function runSeeds() {
await dataSource.initialize();
try {
- // console.log('๐ฑ Seeding database...');
-
await seedOrganizations(dataSource);
await seedUsers(dataSource);
-
- // console.log('โ
Seeding completed!');
} catch (_error) {
- // console.error('โ Seeding failed:', _error);
} finally {
await dataSource.destroy();
}
diff --git a/backend/src/database/seeds/workflow-definitions.seed.ts b/backend/src/database/seeds/workflow-definitions.seed.ts
index 37bc7d7..a453d29 100644
--- a/backend/src/database/seeds/workflow-definitions.seed.ts
+++ b/backend/src/database/seeds/workflow-definitions.seed.ts
@@ -130,14 +130,9 @@ export const seedWorkflowDefinitions = async (dataSource: DataSource) => {
is_active: true,
})
);
- // console.log(`โ
Seeded Workflow: ${dsl.workflow} v${dsl.version}`);
} catch (_error) {
- // console.error(`โ Failed to seed workflow ${dsl.workflow}:`, _error);
}
} else {
- // console.log(
- // `โญ๏ธ Workflow already exists: ${dsl.workflow} v${dsl.version}`
- // );
}
}
};
diff --git a/backend/src/modules/contract/contract.controller.ts b/backend/src/modules/contract/contract.controller.ts
index ea52a0e..901cb9b 100644
--- a/backend/src/modules/contract/contract.controller.ts
+++ b/backend/src/modules/contract/contract.controller.ts
@@ -13,7 +13,6 @@ import {
ApiTags,
ApiOperation,
ApiBearerAuth,
- ApiQuery,
} from '@nestjs/swagger';
import { ContractService } from './contract.service.js';
import { CreateContractDto } from './dto/create-contract.dto.js';
diff --git a/backend/src/modules/correspondence/correspondence.service.ts b/backend/src/modules/correspondence/correspondence.service.ts
index 11f2109..6be4f6f 100644
--- a/backend/src/modules/correspondence/correspondence.service.ts
+++ b/backend/src/modules/correspondence/correspondence.service.ts
@@ -26,7 +26,6 @@ import { CreateCorrespondenceDto } from './dto/create-correspondence.dto';
import { UpdateCorrespondenceDto } from './dto/update-correspondence.dto';
import { AddReferenceDto } from './dto/add-reference.dto';
import { SearchCorrespondenceDto } from './dto/search-correspondence.dto';
-import { DeepPartial } from 'typeorm';
// Services
import { DocumentNumberingService } from '../document-numbering/services/document-numbering.service';
@@ -506,7 +505,7 @@ export class CorrespondenceService {
: undefined;
// 3. Update Correspondence Entity if needed
- const correspondenceUpdate: any = {};
+    const correspondenceUpdate: Record<string, unknown> = {};
if (updateDto.disciplineId)
correspondenceUpdate.disciplineId = updateDto.disciplineId;
if (updResolvedProjectId)
@@ -519,7 +518,7 @@ export class CorrespondenceService {
}
// 4. Update Revision Entity
- const revisionUpdate: any = {};
+    const revisionUpdate: Record<string, unknown> = {};
if (updateDto.subject) revisionUpdate.subject = updateDto.subject;
if (updateDto.body) revisionUpdate.body = updateDto.body;
if (updateDto.remarks) revisionUpdate.remarks = updateDto.remarks;
diff --git a/backend/src/modules/correspondence/entities/correspondence-recipient.entity.ts b/backend/src/modules/correspondence/entities/correspondence-recipient.entity.ts
index 3f6f22f..6af591d 100644
--- a/backend/src/modules/correspondence/entities/correspondence-recipient.entity.ts
+++ b/backend/src/modules/correspondence/entities/correspondence-recipient.entity.ts
@@ -1,4 +1,4 @@
-import { Entity, Column, PrimaryColumn, ManyToOne, JoinColumn } from 'typeorm';
+import { Entity, PrimaryColumn, ManyToOne, JoinColumn } from 'typeorm';
import { Correspondence } from './correspondence.entity';
import { Organization } from '../../organization/entities/organization.entity';
diff --git a/backend/src/modules/document-numbering/controllers/document-numbering.controller.ts b/backend/src/modules/document-numbering/controllers/document-numbering.controller.ts
index ecdde71..d286ecd 100644
--- a/backend/src/modules/document-numbering/controllers/document-numbering.controller.ts
+++ b/backend/src/modules/document-numbering/controllers/document-numbering.controller.ts
@@ -116,10 +116,6 @@ export class DocumentNumberingController {
year: dto.year,
customTokens: dto.customTokens,
});
- // console.log(
- // '[DocumentNumberingController] Preview result:',
- // JSON.stringify(result)
- // );
return result;
}
}
diff --git a/backend/src/modules/document-numbering/services/format.service.ts b/backend/src/modules/document-numbering/services/format.service.ts
index 8bc7d83..89a06c5 100644
--- a/backend/src/modules/document-numbering/services/format.service.ts
+++ b/backend/src/modules/document-numbering/services/format.service.ts
@@ -51,9 +51,6 @@ export class FormatService {
tokens,
options.sequence
);
- // console.log(
- // `[FormatService] Generated: "${previewNumber}" | Template: "${template}" | isDefault: ${isDefault}`
- // );
return { previewNumber, isDefault };
}
diff --git a/backend/src/modules/document-numbering/services/metrics.service.ts b/backend/src/modules/document-numbering/services/metrics.service.ts
index ab63795..a16613b 100644
--- a/backend/src/modules/document-numbering/services/metrics.service.ts
+++ b/backend/src/modules/document-numbering/services/metrics.service.ts
@@ -1,4 +1,4 @@
-import { Injectable, Logger } from '@nestjs/common';
+import { Injectable } from '@nestjs/common';
import { Counter, Gauge, Histogram } from 'prom-client';
import { InjectMetric } from '@willsoto/nestjs-prometheus';
diff --git a/backend/src/modules/document-numbering/services/template.service.ts b/backend/src/modules/document-numbering/services/template.service.ts
index 7516989..4ce5d53 100644
--- a/backend/src/modules/document-numbering/services/template.service.ts
+++ b/backend/src/modules/document-numbering/services/template.service.ts
@@ -1,4 +1,4 @@
-import { Injectable, Logger, NotFoundException } from '@nestjs/common';
+import { Injectable, Logger } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { DocumentNumberFormat } from '../entities/document-number-format.entity';
diff --git a/backend/src/modules/master/dto/create-tag.dto.ts b/backend/src/modules/master/dto/create-tag.dto.ts
index 70f461b..9e3879f 100644
--- a/backend/src/modules/master/dto/create-tag.dto.ts
+++ b/backend/src/modules/master/dto/create-tag.dto.ts
@@ -1,4 +1,4 @@
-import { IsString, IsNotEmpty, IsOptional, IsInt } from 'class-validator';
+import { IsString, IsNotEmpty, IsOptional } from 'class-validator';
import { ApiProperty } from '@nestjs/swagger';
export class CreateTagDto {
diff --git a/backend/src/modules/master/dto/save-number-format.dto.ts b/backend/src/modules/master/dto/save-number-format.dto.ts
index 6322184..73b46e2 100644
--- a/backend/src/modules/master/dto/save-number-format.dto.ts
+++ b/backend/src/modules/master/dto/save-number-format.dto.ts
@@ -1,4 +1,4 @@
-import { IsInt, IsString, IsNotEmpty, IsOptional } from 'class-validator';
+import { IsInt, IsString, IsNotEmpty } from 'class-validator';
export class SaveNumberFormatDto {
@IsInt()
diff --git a/backend/src/modules/master/master.controller.ts b/backend/src/modules/master/master.controller.ts
index 05092e8..25afb08 100644
--- a/backend/src/modules/master/master.controller.ts
+++ b/backend/src/modules/master/master.controller.ts
@@ -4,7 +4,6 @@ import {
Controller,
Get,
Post,
- Put,
Body,
Patch,
Param,
diff --git a/backend/src/modules/notification/dto/create-notification.dto.ts b/backend/src/modules/notification/dto/create-notification.dto.ts
index f941d2e..352497d 100644
--- a/backend/src/modules/notification/dto/create-notification.dto.ts
+++ b/backend/src/modules/notification/dto/create-notification.dto.ts
@@ -4,7 +4,6 @@ import {
IsOptional,
IsEnum,
IsNotEmpty,
- IsUrl,
} from 'class-validator';
import { NotificationType } from '../entities/notification.entity';
diff --git a/backend/src/modules/notification/notification-cleanup.service.ts b/backend/src/modules/notification/notification-cleanup.service.ts
index 616d22a..1865418 100644
--- a/backend/src/modules/notification/notification-cleanup.service.ts
+++ b/backend/src/modules/notification/notification-cleanup.service.ts
@@ -1,7 +1,7 @@
import { Injectable, Logger } from '@nestjs/common';
import { Cron, CronExpression } from '@nestjs/schedule';
import { InjectRepository } from '@nestjs/typeorm';
-import { Repository, LessThan } from 'typeorm';
+import { Repository } from 'typeorm';
import { Notification } from './entities/notification.entity';
@Injectable()
diff --git a/backend/src/modules/organization/organization.service.ts b/backend/src/modules/organization/organization.service.ts
index 1453475..547eacc 100644
--- a/backend/src/modules/organization/organization.service.ts
+++ b/backend/src/modules/organization/organization.service.ts
@@ -72,9 +72,6 @@ export class OrganizationService {
const [data, total] = await queryBuilder.getManyAndCount();
- // Debug logging
- // console.log(`[OrganizationService] Found ${total} organizations`);
-
return {
data,
meta: {
diff --git a/backend/src/modules/project/project.service.ts b/backend/src/modules/project/project.service.ts
index 4decc8f..1b4368f 100644
--- a/backend/src/modules/project/project.service.ts
+++ b/backend/src/modules/project/project.service.ts
@@ -5,7 +5,7 @@ import {
Logger,
} from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
-import { Repository, Like } from 'typeorm';
+import { Repository } from 'typeorm';
// Entities
import { Project } from './entities/project.entity';
diff --git a/backend/src/modules/user/dto/assign-role.dto.ts b/backend/src/modules/user/dto/assign-role.dto.ts
index 7d3414b..febb5b6 100644
--- a/backend/src/modules/user/dto/assign-role.dto.ts
+++ b/backend/src/modules/user/dto/assign-role.dto.ts
@@ -1,4 +1,4 @@
-import { IsInt, IsNotEmpty, IsOptional, ValidateIf } from 'class-validator';
+import { IsInt, IsNotEmpty, IsOptional } from 'class-validator';
export class AssignRoleDto {
@IsInt()
diff --git a/backend/src/scripts/migrate-storage-v2.ts b/backend/src/scripts/migrate-storage-v2.ts
index 3bc1f16..89b7b2b 100644
--- a/backend/src/scripts/migrate-storage-v2.ts
+++ b/backend/src/scripts/migrate-storage-v2.ts
@@ -13,14 +13,12 @@ async function migrateStorage() {
await dataSource.initialize();
try {
- // console.log('๐ Starting Storage Migration v2...');
const attachmentRepo = dataSource.getRepository(Attachment);
// Find all permanent attachments
const attachments = await attachmentRepo.find({
where: { isTemporary: false },
});
- // console.log(`Found ${attachments.length} permanent attachments.`);
let _movedCount = 0;
let _errorCount = 0;
@@ -32,8 +30,6 @@ async function migrateStorage() {
process.env.UPLOAD_PERMANENT_DIR ||
path.join(process.cwd(), 'uploads', 'permanent');
- // console.log(`Target Permanent Directory: ${permanentBaseDir}`);
-
if (!fs.existsSync(permanentBaseDir)) {
// console.warn(
// `Base directory not found: ${permanentBaseDir}. Creating it...`
@@ -49,7 +45,6 @@ async function migrateStorage() {
const currentPath = att.filePath;
if (!fs.existsSync(currentPath)) {
- // console.warn(`File not found on disk: ${currentPath} (ID: ${att.id})`);
_errorCount++;
continue;
}
@@ -67,7 +62,6 @@ async function migrateStorage() {
? new Date(att.referenceDate)
: new Date(att.createdAt);
if (isNaN(refDate.getTime())) {
- // console.warn(`Invalid date for ID ${att.id}, skipping.`);
_errorCount++;
continue;
}
@@ -97,22 +91,11 @@ async function migrateStorage() {
}
await attachmentRepo.save(att);
_movedCount++;
- // if (movedCount % 100 === 0) console.log(`Moved ${movedCount} files...`);
} catch (_err: unknown) {
- // console.error(
- // `Failed to move file ID ${att.id}:`,
- // (err as Error).message
- // );
_errorCount++;
}
}
-
- // console.log(`Migration completed.`);
- // console.log(`Moved: ${movedCount}`);
- // console.log(`Skipped: ${skippedCount}`);
- // console.log(`Errors: ${errorCount}`);
} catch (_error) {
- // console.error('Migration failed:', error);
} finally {
await dataSource.destroy();
}
diff --git a/frontend/components/numbering/template-tester.tsx b/frontend/components/numbering/template-tester.tsx
index e4f9ba6..9d2fc47 100644
--- a/frontend/components/numbering/template-tester.tsx
+++ b/frontend/components/numbering/template-tester.tsx
@@ -65,16 +65,13 @@ export function TemplateTester({ open, onOpenChange, template }: TemplateTesterP
disciplineId: Number(testData.disciplineId || '0'),
year: testData.year,
};
- // console.log("TemplateTester: Sending payload:", payload); /* TODO: Remove before prod */
const result = await numberingApi.previewNumber(payload);
- // console.log("TemplateTester: Received result:", result); /* TODO: Remove before prod */
setTestResult({
number: result.previewNumber,
isDefault: result.isDefault,
});
} catch (error: unknown) {
- // console.error("Test Preview Error:", error); /* TODO: Remove before prod */
const errMsg = error?.response?.data?.message || error?.message || 'Unknown error';
setTestResult({ number: `Error: ${errMsg}`, isDefault: false });
} finally {
diff --git a/frontend/lib/auth.ts b/frontend/lib/auth.ts
index 9151bc4..baf8d74 100644
--- a/frontend/lib/auth.ts
+++ b/frontend/lib/auth.ts
@@ -139,10 +139,6 @@ export const {
password: credentials.password as string,
};
- // console.log(`[AUTH] Attempting login at: ${baseUrl}/auth/login`); /* TODO: Remove before prod */
- // console.log(`[AUTH] Current process.env.INTERNAL_API_URL: ${process.env.INTERNAL_API_URL}`); /* TODO: Remove before prod */
- // console.log(`[AUTH] Current process.env.NEXT_PUBLIC_API_URL: ${process.env.NEXT_PUBLIC_API_URL}`); /* TODO: Remove before prod */
-
const res = await fetch(`${baseUrl}/auth/login`, {
method: 'POST',
body: JSON.stringify(payload),
@@ -153,9 +149,7 @@ export const {
});
if (!res.ok) {
- // console.error(`[AUTH] Login Failed: status ${res.status}`); /* TODO: Remove before prod */
const _errorBody = await res.text().catch(() => 'No error body');
- // console.error(`[AUTH] Error details: ${errorBody}`); /* TODO: Remove before prod */
return null;
}
@@ -163,12 +157,9 @@ export const {
const backendData = unwrapApiResponse(data);
if (!isLoginPayload(backendData)) {
- // console.error("[AUTH] Login failed: Invalid response format from backend (missing access_token)"); /* TODO: Remove before prod */
return null;
}
- // console.log(`[AUTH] Login Successful for user: ${backendData.user?.username || 'unknown'}`); /* TODO: Remove before prod */
-
return {
id: backendData.user.user_id.toString(),
name: `${backendData.user.firstName ?? ''} ${backendData.user.lastName ?? ''}`.trim(),
@@ -180,7 +171,6 @@ export const {
refreshToken: backendData.refresh_token,
} as User;
} catch (_error) {
- // console.error("[AUTH] Network/Fetch Error during authorize:", error); /* TODO: Remove before prod */
return null;
}
},
diff --git a/frontend/package.json b/frontend/package.json
index 434fd7e..75d8a3d 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -1,6 +1,6 @@
{
"name": "lcbp3-frontend",
- "version": "1.8.0",
+ "version": "1.8.1",
"packageManager": "pnpm@10.32.1+sha512.a706938f0e89ac1456b6563eab4edf1d1faf3368d1191fc5c59790e96dc918e4456ab2e67d613de1043d2e8c81f87303e6b40d4ffeca9df15ef1ad567348f2be",
"private": true,
"scripts": {
diff --git a/scripts/deploy.sh b/scripts/deploy.sh
new file mode 100644
index 0000000..b29a325
--- /dev/null
+++ b/scripts/deploy.sh
@@ -0,0 +1,131 @@
+#!/bin/bash
+
+# File: scripts/deploy.sh
+# LCBP3-DMS Blue-Green Deployment Script
+# v1.8.1
+
+set -e # Exit on error
+
+# Configuration
+LCBP3_DIR="/volume1/lcbp3"
+CURRENT_FILE="$LCBP3_DIR/current"
+
+# Ensure current file exists
+if [ ! -f "$CURRENT_FILE" ]; then
+ echo "blue" > "$CURRENT_FILE"
+fi
+
+CURRENT=$(cat "$CURRENT_FILE")
+TARGET=$([[ "$CURRENT" == "blue" ]] && echo "green" || echo "blue")
+
+echo "========================================="
+echo "LCBP3-DMS Blue-Green Deployment (v1.8.1)"
+echo "========================================="
+echo "Current environment: $CURRENT"
+echo "Target environment: $TARGET"
+echo "========================================="
+
+# Step 1: Backup database
+echo "[1/9] Creating database backup..."
+BACKUP_DIR="$LCBP3_DIR/shared/backups"
+mkdir -p "$BACKUP_DIR"
+BACKUP_FILE="$BACKUP_DIR/db-backup-$(date +%Y%m%d-%H%M%S).sql"
+
+# Note: DB_PASSWORD should be in environment or .env
+if [ -z "$DB_PASSWORD" ]; then
+ echo "Warning: DB_PASSWORD not found in environment. Attempting to source from .env..."
+ if [ -f "$LCBP3_DIR/$CURRENT/.env.production" ]; then
+    export "$(grep -m1 '^DB_PASSWORD=' "$LCBP3_DIR/$CURRENT/.env.production")"
+ fi
+fi
+
+docker exec lcbp3-mariadb mysqldump -u root -p"${DB_PASSWORD}" lcbp3_dms > "$BACKUP_FILE"
+gzip "$BACKUP_FILE"
+echo "โ Backup created: $BACKUP_FILE.gz"
+
+# Step 2: Pull latest images from registry
+echo "[2/9] Pulling latest Docker images from internal registry..."
+cd "$LCBP3_DIR/$TARGET"
+docker-compose pull
+echo "โ Images pulled"
+
+# Step 3: Update configuration
+echo "[3/9] Updating configuration..."
+if [ -f "$LCBP3_DIR/.env.production.new" ]; then
+ cp "$LCBP3_DIR/.env.production.new" "$LCBP3_DIR/$TARGET/.env.production"
+ rm "$LCBP3_DIR/.env.production.new"
+ echo "โ Configuration updated from .env.production.new"
+fi
+
+# Step 4: Start target environment
+echo "[4/9] Starting $TARGET environment..."
+docker-compose up -d
+echo "โ $TARGET environment started"
+
+# Step 5: Wait for services to be ready
+echo "[5/9] Waiting for services to be healthy..."
+sleep 15
+
+# Check backend health
+for i in {1..30}; do
+ if docker exec lcbp3-${TARGET}-backend curl -f http://localhost:3000/health > /dev/null 2>&1; then
+ echo "โ Backend is healthy"
+ break
+ fi
+ if [ $i -eq 30 ]; then
+ echo "โ Backend health check failed!"
+ docker-compose logs backend
+ exit 1
+ fi
+ sleep 2
+done
+
+# Step 6: Run database migrations (ADR-009)
+echo "[6/9] Running database migrations..."
+# Note: Following ADR-009, this might be a no-op if manual SQL is preferred,
+# but keeping it for DTO/Entity alignment checks.
+docker exec lcbp3-${TARGET}-backend npm run start:prod -- --migration-run || echo "Migration check complete"
+echo "โ Migrations stage complete"
+
+# Step 7: Switch NGINX to target environment
+echo "[7/9] Switching NGINX to $TARGET..."
+NGINX_CONF="$LCBP3_DIR/nginx-proxy/nginx.conf"
+if [ -f "$NGINX_CONF" ]; then
+ sed -i "s/lcbp3-${CURRENT}-backend/lcbp3-${TARGET}-backend/g" "$NGINX_CONF"
+ sed -i "s/lcbp3-${CURRENT}-frontend/lcbp3-${TARGET}-frontend/g" "$NGINX_CONF"
+ docker exec lcbp3-nginx nginx -t
+ docker exec lcbp3-nginx nginx -s reload
+ echo "โ NGINX switched to $TARGET"
+else
+ echo "Warning: NGINX config not found at $NGINX_CONF. Skipping switch."
+fi
+
+# Step 8: Verify new environment
+echo "[8/9] Verifying new environment via Proxy..."
+sleep 5
+# Attempt to curl via the local proxy or direct container
+if docker exec lcbp3-nginx curl -f -k http://lcbp3-${TARGET}-backend:3000/health > /dev/null 2>&1; then
+ echo "โ New environment is responding via internal network"
+else
+ echo "โ New environment verification failed!"
+ echo "Rolling back..."
+ # Call rollback script if it exists
+ if [ -f "$LCBP3_DIR/scripts/rollback.sh" ]; then
+ "$LCBP3_DIR/scripts/rollback.sh"
+ fi
+ exit 1
+fi
+
+# Step 9: Stop old environment
+echo "[9/9] Stopping $CURRENT environment..."
+cd "$LCBP3_DIR/$CURRENT"
+docker-compose down
+echo "โ $CURRENT environment stopped"
+
+# Update current pointer
+echo "$TARGET" > "$CURRENT_FILE"
+
+echo "========================================="
+echo "โ Deployment completed successfully!"
+echo "Active environment: $TARGET"
+echo "========================================="
diff --git a/scripts/rollback.sh b/scripts/rollback.sh
new file mode 100644
index 0000000..557d0a4
--- /dev/null
+++ b/scripts/rollback.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# File: scripts/rollback.sh
+# LCBP3-DMS Rollback Script
+# v1.8.1
+
+set -e
+
+LCBP3_DIR="/volume1/lcbp3"
+CURRENT_FILE="$LCBP3_DIR/current"
+CURRENT=$(cat "$CURRENT_FILE")
+PREVIOUS=$([[ "$CURRENT" == "blue" ]] && echo "green" || echo "blue")
+
+echo "========================================="
+echo "LCBP3-DMS Rollback (v1.8.1)"
+echo "========================================="
+echo "Current: $CURRENT"
+echo "Rolling back to: $PREVIOUS"
+echo "========================================="
+
+# Switch NGINX back
+echo "[1/3] Switching NGINX to $PREVIOUS..."
+NGINX_CONF="$LCBP3_DIR/nginx-proxy/nginx.conf"
+if [ -f "$NGINX_CONF" ]; then
+ sed -i "s/lcbp3-${CURRENT}-backend/lcbp3-${PREVIOUS}-backend/g" "$NGINX_CONF"
+ sed -i "s/lcbp3-${CURRENT}-frontend/lcbp3-${PREVIOUS}-frontend/g" "$NGINX_CONF"
+  docker exec lcbp3-nginx nginx -t && docker exec lcbp3-nginx nginx -s reload
+ echo "โ NGINX switched back to $PREVIOUS"
+fi
+
+# Start previous environment if stopped
+echo "[2/3] Ensuring $PREVIOUS environment is running..."
+cd "$LCBP3_DIR/$PREVIOUS"
+docker-compose up -d
+sleep 15
+echo "โ $PREVIOUS environment is running"
+
+# Verify
+echo "[3/3] Verifying rollback..."
+if docker exec lcbp3-nginx curl -f -k http://lcbp3-${PREVIOUS}-backend:3000/health > /dev/null 2>&1; then
+ echo "โ Rollback successful"
+ echo "$PREVIOUS" > "$CURRENT_FILE"
+else
+ echo "โ Rollback verification failed!"
+ exit 1
+fi
+
+echo "========================================="
+echo "โ Rollback completed"
+echo "Active environment: $PREVIOUS"
+echo "========================================="
diff --git a/.gitea/workflows/ci.yml b/specs/99-archives/ci.yml
similarity index 100%
rename from .gitea/workflows/ci.yml
rename to specs/99-archives/ci.yml
diff --git a/.gitea/workflows/deploy.yaml b/specs/99-archives/deploy.yaml
similarity index 100%
rename from .gitea/workflows/deploy.yaml
rename to specs/99-archives/deploy.yaml