690322:2123 Fixing Deployment Errors
This commit is contained in:
@@ -0,0 +1,129 @@
|
||||
name: CI / CD Pipeline

on:
  push:
    branches: [main, develop]
  pull_request:
  workflow_dispatch:

jobs:
  # ============================================================
  # JOB 1 : CI & Quality Gate
  # ============================================================
  build:
    runs-on: ubuntu-latest
    steps:
      - name: 📥 Checkout
        uses: actions/checkout@v4

      - name: 🟢 Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: 20

      - name: 📦 Install pnpm
        run: npm install -g pnpm

      - name: 📦 Install deps
        run: pnpm install

      - name: 🧹 Lint
        run: pnpm lint

      - name: 🔍 Security & quality checks
        run: |
          # pnpm install has already populated node_modules, which is full of
          # console.log calls — exclude dependency and build-output dirs so
          # only first-party sources are scanned.
          EXCLUDES="--exclude-dir=node_modules --exclude-dir=dist --exclude-dir=.next --exclude-dir=build --exclude-dir=coverage"
          # UUID misuse check (ADR-019)
          if grep -r $EXCLUDES --include="*.ts" --include="*.tsx" "parseInt(.*uuid" .; then
            echo "❌ UUID misuse detected"
            exit 1
          fi
          # console.log check (Clean Code). The '.' is escaped so only real
          # member access matches, and grep -r output ("path:content") is
          # filtered to skip commented-out occurrences ("// console.log").
          if grep -r $EXCLUDES --include="*.ts" --include="*.tsx" "console\.log" . | grep -vE ':[[:space:]]*//'; then
            echo "❌ console.log detected"
            exit 1
          fi

      - name: 🧪 Run Tests
        run: |
          cd backend && pnpm test --watchAll=false
          cd ../frontend && pnpm test run

      - name: 🏗️ Verify Build
        run: |
          cd backend && pnpm build
          cd ../frontend && pnpm build

  # ============================================================
  # JOB 2 : Build & Push Image (Release Stage)
  # ============================================================
  release:
    needs: build
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    steps:
      - name: 📥 Checkout
        uses: actions/checkout@v4

      - name: 🏷️ Get Version
        id: pkg-version
        run: |
          VERSION=$(node -p "require('./backend/package.json').version")
          echo "VERSION=$VERSION" >> $GITHUB_OUTPUT
          echo "Releasing version: $VERSION"

      - name: 🐋 Login to Internal Registry
        run: |
          echo "${{ secrets.REGISTRY_PASSWORD }}" | docker login ${{ secrets.REGISTRY_URL }} -u ${{ secrets.REGISTRY_USERNAME }} --password-stdin

      - name: 🏗️ Build & Push Backend
        run: |
          docker build -f backend/Dockerfile \
            -t ${{ secrets.REGISTRY_URL }}/lcbp3-backend:${{ steps.pkg-version.outputs.VERSION }} \
            -t ${{ secrets.REGISTRY_URL }}/lcbp3-backend:latest .
          docker push ${{ secrets.REGISTRY_URL }}/lcbp3-backend:${{ steps.pkg-version.outputs.VERSION }}
          docker push ${{ secrets.REGISTRY_URL }}/lcbp3-backend:latest

      - name: 🏗️ Build & Push Frontend
        run: |
          docker build -f frontend/Dockerfile \
            --build-arg NEXT_PUBLIC_API_URL=${{ secrets.NEXT_PUBLIC_API_URL }} \
            -t ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:${{ steps.pkg-version.outputs.VERSION }} \
            -t ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:latest .
          docker push ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:${{ steps.pkg-version.outputs.VERSION }}
          docker push ${{ secrets.REGISTRY_URL }}/lcbp3-frontend:latest

  # ============================================================
  # JOB 3 : Deploy — Trigger Blue-Green on QNAP
  # ============================================================
  deploy:
    needs: release
    if: github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    steps:
      - name: 🚀 Trigger Deployment on QNAP
        uses: appleboy/ssh-action@v1.0.3
        with:
          host: ${{ secrets.HOST }}
          username: ${{ secrets.USERNAME }}
          password: ${{ secrets.PASSWORD }}
          port: ${{ secrets.PORT }}
          timeout: 1200s
          command_timeout: 900s
          # NOTE: the boolean "stop on first error" option of ssh-action is
          # `script_stop`; `script_stop_signal` expects a signal name, so the
          # previous `script_stop_signal: true` was silently ignored/invalid.
          script_stop: true
          script: |
            set -e
            export PATH="/share/CACHEDEV1_DATA/.qpkg/container-station/bin:/opt/bin:/usr/local/bin:/usr/bin:/bin:$PATH"

            # Sync scripts first
            echo "📂 Syncing deployment scripts..."
            cd /share/np-dms/app/source/lcbp3
            git fetch origin main
            git reset --hard origin/main

            # Ensure scripts are executable
            chmod +x scripts/deploy.sh scripts/rollback.sh

            echo "🚀 Executing Blue-Green deployment..."
            # Pass registry credentials if needed by the pull command in deploy.sh
            export DB_PASSWORD="${{ secrets.DB_PASSWORD }}"
            ./scripts/deploy.sh
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"version": "1.8.0",
|
||||
"version": "1.8.1",
|
||||
"packageManager": "pnpm@10.32.1+sha512.a706938f0e89ac1456b6563eab4edf1d1faf3368d1191fc5c59790e96dc918e4456ab2e67d613de1043d2e8c81f87303e6b40d4ffeca9df15ef1ad567348f2be",
|
||||
"description": "<p align=\"center\">\r <a href=\"http://nestjs.com/\" target=\"blank\"><img src=\"https://nestjs.com/img/logo-small.svg\" width=\"120\" alt=\"Nest Logo\" /></a>\r </p>",
|
||||
"author": "",
|
||||
|
||||
@@ -78,7 +78,7 @@ export class AuthService {
|
||||
derivedRole = 'DC';
|
||||
}
|
||||
}
|
||||
const { password, ...result } = user;
|
||||
const { password: _password, ...result } = user;
|
||||
return { ...result, role: derivedRole } as User & { role: string };
|
||||
}
|
||||
return null;
|
||||
|
||||
@@ -10,14 +10,9 @@ async function runSeeds() {
|
||||
await dataSource.initialize();
|
||||
|
||||
try {
|
||||
// console.log('🌱 Seeding database...');
|
||||
|
||||
await seedOrganizations(dataSource);
|
||||
await seedUsers(dataSource);
|
||||
|
||||
// console.log('✅ Seeding completed!');
|
||||
} catch (_error) {
|
||||
// console.error('❌ Seeding failed:', _error);
|
||||
} finally {
|
||||
await dataSource.destroy();
|
||||
}
|
||||
|
||||
@@ -130,14 +130,9 @@ export const seedWorkflowDefinitions = async (dataSource: DataSource) => {
|
||||
is_active: true,
|
||||
})
|
||||
);
|
||||
// console.log(`✅ Seeded Workflow: ${dsl.workflow} v${dsl.version}`);
|
||||
} catch (_error) {
|
||||
// console.error(`❌ Failed to seed workflow ${dsl.workflow}:`, _error);
|
||||
}
|
||||
} else {
|
||||
// console.log(
|
||||
// `⏭️ Workflow already exists: ${dsl.workflow} v${dsl.version}`
|
||||
// );
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -13,7 +13,6 @@ import {
|
||||
ApiTags,
|
||||
ApiOperation,
|
||||
ApiBearerAuth,
|
||||
ApiQuery,
|
||||
} from '@nestjs/swagger';
|
||||
import { ContractService } from './contract.service.js';
|
||||
import { CreateContractDto } from './dto/create-contract.dto.js';
|
||||
|
||||
@@ -26,7 +26,6 @@ import { CreateCorrespondenceDto } from './dto/create-correspondence.dto';
|
||||
import { UpdateCorrespondenceDto } from './dto/update-correspondence.dto';
|
||||
import { AddReferenceDto } from './dto/add-reference.dto';
|
||||
import { SearchCorrespondenceDto } from './dto/search-correspondence.dto';
|
||||
import { DeepPartial } from 'typeorm';
|
||||
|
||||
// Services
|
||||
import { DocumentNumberingService } from '../document-numbering/services/document-numbering.service';
|
||||
@@ -506,7 +505,7 @@ export class CorrespondenceService {
|
||||
: undefined;
|
||||
|
||||
// 3. Update Correspondence Entity if needed
|
||||
const correspondenceUpdate: any = {};
|
||||
const correspondenceUpdate: Record<string, unknown> = {};
|
||||
if (updateDto.disciplineId)
|
||||
correspondenceUpdate.disciplineId = updateDto.disciplineId;
|
||||
if (updResolvedProjectId)
|
||||
@@ -519,7 +518,7 @@ export class CorrespondenceService {
|
||||
}
|
||||
|
||||
// 4. Update Revision Entity
|
||||
const revisionUpdate: any = {};
|
||||
const revisionUpdate: Record<string, unknown> = {};
|
||||
if (updateDto.subject) revisionUpdate.subject = updateDto.subject;
|
||||
if (updateDto.body) revisionUpdate.body = updateDto.body;
|
||||
if (updateDto.remarks) revisionUpdate.remarks = updateDto.remarks;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Entity, Column, PrimaryColumn, ManyToOne, JoinColumn } from 'typeorm';
|
||||
import { Entity, PrimaryColumn, ManyToOne, JoinColumn } from 'typeorm';
|
||||
import { Correspondence } from './correspondence.entity';
|
||||
import { Organization } from '../../organization/entities/organization.entity';
|
||||
|
||||
|
||||
@@ -116,10 +116,6 @@ export class DocumentNumberingController {
|
||||
year: dto.year,
|
||||
customTokens: dto.customTokens,
|
||||
});
|
||||
// console.log(
|
||||
// '[DocumentNumberingController] Preview result:',
|
||||
// JSON.stringify(result)
|
||||
// );
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,9 +51,6 @@ export class FormatService {
|
||||
tokens,
|
||||
options.sequence
|
||||
);
|
||||
// console.log(
|
||||
// `[FormatService] Generated: "${previewNumber}" | Template: "${template}" | isDefault: ${isDefault}`
|
||||
// );
|
||||
return { previewNumber, isDefault };
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { Counter, Gauge, Histogram } from 'prom-client';
|
||||
import { InjectMetric } from '@willsoto/nestjs-prometheus';
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Injectable, Logger, NotFoundException } from '@nestjs/common';
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
import { DocumentNumberFormat } from '../entities/document-number-format.entity';
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { IsString, IsNotEmpty, IsOptional, IsInt } from 'class-validator';
|
||||
import { IsString, IsNotEmpty, IsOptional } from 'class-validator';
|
||||
import { ApiProperty } from '@nestjs/swagger';
|
||||
|
||||
export class CreateTagDto {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { IsInt, IsString, IsNotEmpty, IsOptional } from 'class-validator';
|
||||
import { IsInt, IsString, IsNotEmpty } from 'class-validator';
|
||||
|
||||
export class SaveNumberFormatDto {
|
||||
@IsInt()
|
||||
|
||||
@@ -4,7 +4,6 @@ import {
|
||||
Controller,
|
||||
Get,
|
||||
Post,
|
||||
Put,
|
||||
Body,
|
||||
Patch,
|
||||
Param,
|
||||
|
||||
@@ -4,7 +4,6 @@ import {
|
||||
IsOptional,
|
||||
IsEnum,
|
||||
IsNotEmpty,
|
||||
IsUrl,
|
||||
} from 'class-validator';
|
||||
import { NotificationType } from '../entities/notification.entity';
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { Cron, CronExpression } from '@nestjs/schedule';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { Repository, LessThan } from 'typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
import { Notification } from './entities/notification.entity';
|
||||
|
||||
@Injectable()
|
||||
|
||||
@@ -72,9 +72,6 @@ export class OrganizationService {
|
||||
|
||||
const [data, total] = await queryBuilder.getManyAndCount();
|
||||
|
||||
// Debug logging
|
||||
// console.log(`[OrganizationService] Found ${total} organizations`);
|
||||
|
||||
return {
|
||||
data,
|
||||
meta: {
|
||||
|
||||
@@ -5,7 +5,7 @@ import {
|
||||
Logger,
|
||||
} from '@nestjs/common';
|
||||
import { InjectRepository } from '@nestjs/typeorm';
|
||||
import { Repository, Like } from 'typeorm';
|
||||
import { Repository } from 'typeorm';
|
||||
|
||||
// Entities
|
||||
import { Project } from './entities/project.entity';
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { IsInt, IsNotEmpty, IsOptional, ValidateIf } from 'class-validator';
|
||||
import { IsInt, IsNotEmpty, IsOptional } from 'class-validator';
|
||||
|
||||
export class AssignRoleDto {
|
||||
@IsInt()
|
||||
|
||||
@@ -13,14 +13,12 @@ async function migrateStorage() {
|
||||
await dataSource.initialize();
|
||||
|
||||
try {
|
||||
// console.log('🚀 Starting Storage Migration v2...');
|
||||
const attachmentRepo = dataSource.getRepository(Attachment);
|
||||
|
||||
// Find all permanent attachments
|
||||
const attachments = await attachmentRepo.find({
|
||||
where: { isTemporary: false },
|
||||
});
|
||||
// console.log(`Found ${attachments.length} permanent attachments.`);
|
||||
|
||||
let _movedCount = 0;
|
||||
let _errorCount = 0;
|
||||
@@ -32,8 +30,6 @@ async function migrateStorage() {
|
||||
process.env.UPLOAD_PERMANENT_DIR ||
|
||||
path.join(process.cwd(), 'uploads', 'permanent');
|
||||
|
||||
// console.log(`Target Permanent Directory: ${permanentBaseDir}`);
|
||||
|
||||
if (!fs.existsSync(permanentBaseDir)) {
|
||||
// console.warn(
|
||||
// `Base directory not found: ${permanentBaseDir}. Creating it...`
|
||||
@@ -49,7 +45,6 @@ async function migrateStorage() {
|
||||
|
||||
const currentPath = att.filePath;
|
||||
if (!fs.existsSync(currentPath)) {
|
||||
// console.warn(`File not found on disk: ${currentPath} (ID: ${att.id})`);
|
||||
_errorCount++;
|
||||
continue;
|
||||
}
|
||||
@@ -67,7 +62,6 @@ async function migrateStorage() {
|
||||
? new Date(att.referenceDate)
|
||||
: new Date(att.createdAt);
|
||||
if (isNaN(refDate.getTime())) {
|
||||
// console.warn(`Invalid date for ID ${att.id}, skipping.`);
|
||||
_errorCount++;
|
||||
continue;
|
||||
}
|
||||
@@ -97,22 +91,11 @@ async function migrateStorage() {
|
||||
}
|
||||
await attachmentRepo.save(att);
|
||||
_movedCount++;
|
||||
// if (movedCount % 100 === 0) console.log(`Moved ${movedCount} files...`);
|
||||
} catch (_err: unknown) {
|
||||
// console.error(
|
||||
// `Failed to move file ID ${att.id}:`,
|
||||
// (err as Error).message
|
||||
// );
|
||||
_errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
// console.log(`Migration completed.`);
|
||||
// console.log(`Moved: ${movedCount}`);
|
||||
// console.log(`Skipped: ${skippedCount}`);
|
||||
// console.log(`Errors: ${errorCount}`);
|
||||
} catch (_error) {
|
||||
// console.error('Migration failed:', error);
|
||||
} finally {
|
||||
await dataSource.destroy();
|
||||
}
|
||||
|
||||
@@ -65,16 +65,13 @@ export function TemplateTester({ open, onOpenChange, template }: TemplateTesterP
|
||||
disciplineId: Number(testData.disciplineId || '0'),
|
||||
year: testData.year,
|
||||
};
|
||||
// console.log("TemplateTester: Sending payload:", payload); /* TODO: Remove before prod */
|
||||
const result = await numberingApi.previewNumber(payload);
|
||||
// console.log("TemplateTester: Received result:", result); /* TODO: Remove before prod */
|
||||
|
||||
setTestResult({
|
||||
number: result.previewNumber,
|
||||
isDefault: result.isDefault,
|
||||
});
|
||||
} catch (error: unknown) {
|
||||
// console.error("Test Preview Error:", error); /* TODO: Remove before prod */
|
||||
const errMsg = error?.response?.data?.message || error?.message || 'Unknown error';
|
||||
setTestResult({ number: `Error: ${errMsg}`, isDefault: false });
|
||||
} finally {
|
||||
|
||||
@@ -139,10 +139,6 @@ export const {
|
||||
password: credentials.password as string,
|
||||
};
|
||||
|
||||
// console.log(`[AUTH] Attempting login at: ${baseUrl}/auth/login`); /* TODO: Remove before prod */
|
||||
// console.log(`[AUTH] Current process.env.INTERNAL_API_URL: ${process.env.INTERNAL_API_URL}`); /* TODO: Remove before prod */
|
||||
// console.log(`[AUTH] Current process.env.NEXT_PUBLIC_API_URL: ${process.env.NEXT_PUBLIC_API_URL}`); /* TODO: Remove before prod */
|
||||
|
||||
const res = await fetch(`${baseUrl}/auth/login`, {
|
||||
method: 'POST',
|
||||
body: JSON.stringify(payload),
|
||||
@@ -153,9 +149,7 @@ export const {
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
// console.error(`[AUTH] Login Failed: status ${res.status}`); /* TODO: Remove before prod */
|
||||
const _errorBody = await res.text().catch(() => 'No error body');
|
||||
// console.error(`[AUTH] Error details: ${errorBody}`); /* TODO: Remove before prod */
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -163,12 +157,9 @@ export const {
|
||||
const backendData = unwrapApiResponse(data);
|
||||
|
||||
if (!isLoginPayload(backendData)) {
|
||||
// console.error("[AUTH] Login failed: Invalid response format from backend (missing access_token)"); /* TODO: Remove before prod */
|
||||
return null;
|
||||
}
|
||||
|
||||
// console.log(`[AUTH] Login Successful for user: ${backendData.user?.username || 'unknown'}`); /* TODO: Remove before prod */
|
||||
|
||||
return {
|
||||
id: backendData.user.user_id.toString(),
|
||||
name: `${backendData.user.firstName ?? ''} ${backendData.user.lastName ?? ''}`.trim(),
|
||||
@@ -180,7 +171,6 @@ export const {
|
||||
refreshToken: backendData.refresh_token,
|
||||
} as User;
|
||||
} catch (_error) {
|
||||
// console.error("[AUTH] Network/Fetch Error during authorize:", error); /* TODO: Remove before prod */
|
||||
return null;
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "lcbp3-frontend",
|
||||
"version": "1.8.0",
|
||||
"version": "1.8.1",
|
||||
"packageManager": "pnpm@10.32.1+sha512.a706938f0e89ac1456b6563eab4edf1d1faf3368d1191fc5c59790e96dc918e4456ab2e67d613de1043d2e8c81f87303e6b40d4ffeca9df15ef1ad567348f2be",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
|
||||
@@ -0,0 +1,131 @@
|
||||
#!/bin/bash

# File: scripts/deploy.sh
# LCBP3-DMS Blue-Green Deployment Script
# v1.8.1
#
# Flips traffic between the "blue" and "green" compose stacks:
# backup DB -> pull images -> start target -> health-check -> switch
# NGINX upstreams -> verify -> stop old stack -> persist pointer.

set -e # Exit on error

# Configuration
LCBP3_DIR="/volume1/lcbp3"
CURRENT_FILE="$LCBP3_DIR/current"

# Ensure current file exists (first deployment defaults to "blue")
if [ ! -f "$CURRENT_FILE" ]; then
  echo "blue" > "$CURRENT_FILE"
fi

CURRENT=$(cat "$CURRENT_FILE")
TARGET=$([[ "$CURRENT" == "blue" ]] && echo "green" || echo "blue")

echo "========================================="
echo "LCBP3-DMS Blue-Green Deployment (v1.8.1)"
echo "========================================="
echo "Current environment: $CURRENT"
echo "Target environment: $TARGET"
echo "========================================="

# Step 1: Backup database
echo "[1/9] Creating database backup..."
BACKUP_DIR="$LCBP3_DIR/shared/backups"
mkdir -p "$BACKUP_DIR"
BACKUP_FILE="$BACKUP_DIR/db-backup-$(date +%Y%m%d-%H%M%S).sql"

# Note: DB_PASSWORD should be in environment or .env
if [ -z "$DB_PASSWORD" ]; then
  echo "Warning: DB_PASSWORD not found in environment. Attempting to source from .env..."
  ENV_FILE="$LCBP3_DIR/$CURRENT/.env.production"
  if [ -f "$ENV_FILE" ]; then
    # Anchor to the exact key (^DB_PASSWORD=) so e.g. DB_PASSWORD_OLD does not
    # match, and read the value with cut instead of `export $(... | xargs)`,
    # which word-splits and breaks on passwords containing spaces or quotes.
    DB_PASSWORD=$(grep -E '^DB_PASSWORD=' "$ENV_FILE" | head -n 1 | cut -d= -f2-)
    export DB_PASSWORD
  fi
fi

# Fail fast with a clear message rather than letting mysqldump die cryptically.
if [ -z "$DB_PASSWORD" ]; then
  echo "✗ DB_PASSWORD could not be resolved; aborting before backup."
  exit 1
fi

docker exec lcbp3-mariadb mysqldump -u root -p"${DB_PASSWORD}" lcbp3_dms > "$BACKUP_FILE"
gzip "$BACKUP_FILE"
echo "✓ Backup created: $BACKUP_FILE.gz"

# Step 2: Pull latest images from registry
echo "[2/9] Pulling latest Docker images from internal registry..."
cd "$LCBP3_DIR/$TARGET"
docker-compose pull
echo "✓ Images pulled"

# Step 3: Update configuration (a staged .env.production.new, if present,
# is promoted into the target environment and then removed)
echo "[3/9] Updating configuration..."
if [ -f "$LCBP3_DIR/.env.production.new" ]; then
  cp "$LCBP3_DIR/.env.production.new" "$LCBP3_DIR/$TARGET/.env.production"
  rm "$LCBP3_DIR/.env.production.new"
  echo "✓ Configuration updated from .env.production.new"
fi

# Step 4: Start target environment
echo "[4/9] Starting $TARGET environment..."
docker-compose up -d
echo "✓ $TARGET environment started"

# Step 5: Wait for services to be ready
echo "[5/9] Waiting for services to be healthy..."
sleep 15

# Check backend health (up to 30 attempts, ~60s)
for i in {1..30}; do
  if docker exec lcbp3-${TARGET}-backend curl -f http://localhost:3000/health > /dev/null 2>&1; then
    echo "✓ Backend is healthy"
    break
  fi
  if [ $i -eq 30 ]; then
    echo "✗ Backend health check failed!"
    docker-compose logs backend
    exit 1
  fi
  sleep 2
done

# Step 6: Run database migrations (ADR-009)
echo "[6/9] Running database migrations..."
# Note: Following ADR-009, this might be a no-op if manual SQL is preferred,
# but keeping it for DTO/Entity alignment checks.
docker exec lcbp3-${TARGET}-backend npm run start:prod -- --migration-run || echo "Migration check complete"
echo "✓ Migrations stage complete"

# Step 7: Switch NGINX to target environment
echo "[7/9] Switching NGINX to $TARGET..."
NGINX_CONF="$LCBP3_DIR/nginx-proxy/nginx.conf"
if [ -f "$NGINX_CONF" ]; then
  sed -i "s/lcbp3-${CURRENT}-backend/lcbp3-${TARGET}-backend/g" "$NGINX_CONF"
  sed -i "s/lcbp3-${CURRENT}-frontend/lcbp3-${TARGET}-frontend/g" "$NGINX_CONF"
  # Validate the rewritten config before reloading; `set -e` aborts on failure.
  docker exec lcbp3-nginx nginx -t
  docker exec lcbp3-nginx nginx -s reload
  echo "✓ NGINX switched to $TARGET"
else
  echo "Warning: NGINX config not found at $NGINX_CONF. Skipping switch."
fi

# Step 8: Verify new environment
echo "[8/9] Verifying new environment via Proxy..."
sleep 5
# Attempt to curl via the local proxy or direct container
if docker exec lcbp3-nginx curl -f -k http://lcbp3-${TARGET}-backend:3000/health > /dev/null 2>&1; then
  echo "✓ New environment is responding via internal network"
else
  echo "✗ New environment verification failed!"
  echo "Rolling back..."
  # Call rollback script if it exists
  if [ -f "$LCBP3_DIR/scripts/rollback.sh" ]; then
    "$LCBP3_DIR/scripts/rollback.sh"
  fi
  exit 1
fi

# Step 9: Stop old environment
echo "[9/9] Stopping $CURRENT environment..."
cd "$LCBP3_DIR/$CURRENT"
docker-compose down
echo "✓ $CURRENT environment stopped"

# Update current pointer only after the full switch succeeded
echo "$TARGET" > "$CURRENT_FILE"

echo "========================================="
echo "✓ Deployment completed successfully!"
echo "Active environment: $TARGET"
echo "========================================="
|
||||
@@ -0,0 +1,51 @@
|
||||
#!/bin/bash

# File: scripts/rollback.sh
# LCBP3-DMS Rollback Script
# v1.8.1
#
# Reverts a blue-green switch: points NGINX back at the previous stack,
# ensures that stack is running, verifies health, and persists the pointer.

set -e

LCBP3_DIR="/volume1/lcbp3"
CURRENT_FILE="$LCBP3_DIR/current"

# Without the pointer file we cannot know which environment to revert to.
if [ ! -f "$CURRENT_FILE" ]; then
  echo "✗ Pointer file not found: $CURRENT_FILE — cannot determine environments."
  exit 1
fi

CURRENT=$(cat "$CURRENT_FILE")
PREVIOUS=$([[ "$CURRENT" == "blue" ]] && echo "green" || echo "blue")

echo "========================================="
echo "LCBP3-DMS Rollback (v1.8.1)"
echo "========================================="
echo "Current: $CURRENT"
echo "Rolling back to: $PREVIOUS"
echo "========================================="

# Switch NGINX back
echo "[1/3] Switching NGINX to $PREVIOUS..."
NGINX_CONF="$LCBP3_DIR/nginx-proxy/nginx.conf"
if [ -f "$NGINX_CONF" ]; then
  sed -i "s/lcbp3-${CURRENT}-backend/lcbp3-${PREVIOUS}-backend/g" "$NGINX_CONF"
  sed -i "s/lcbp3-${CURRENT}-frontend/lcbp3-${PREVIOUS}-frontend/g" "$NGINX_CONF"
  # Validate before reload (same safeguard as deploy.sh) so a bad sed result
  # cannot take the proxy down during a rollback.
  docker exec lcbp3-nginx nginx -t
  docker exec lcbp3-nginx nginx -s reload
  echo "✓ NGINX switched back to $PREVIOUS"
fi

# Start previous environment if stopped
echo "[2/3] Ensuring $PREVIOUS environment is running..."
cd "$LCBP3_DIR/$PREVIOUS"
docker-compose up -d
sleep 15
echo "✓ $PREVIOUS environment is running"

# Verify
echo "[3/3] Verifying rollback..."
if docker exec lcbp3-nginx curl -f -k http://lcbp3-${PREVIOUS}-backend:3000/health > /dev/null 2>&1; then
  echo "✓ Rollback successful"
  # Persist the pointer only once the previous stack is confirmed healthy.
  echo "$PREVIOUS" > "$CURRENT_FILE"
else
  echo "✗ Rollback verification failed!"
  exit 1
fi

echo "========================================="
echo "✓ Rollback completed"
echo "Active environment: $PREVIOUS"
echo "========================================="
|
||||
Reference in New Issue
Block a user