260228:1412 20260228: setup n8n
All checks were successful
Build and Deploy / deploy (push) Successful in 2m49s

This commit is contained in:
admin
2026-02-28 14:12:48 +07:00
parent 9ddafbb1ac
commit 276d06e950
27 changed files with 3434 additions and 2313 deletions

View File

@@ -48,6 +48,7 @@ import { MonitoringModule } from './modules/monitoring/monitoring.module';
import { ResilienceModule } from './common/resilience/resilience.module'; import { ResilienceModule } from './common/resilience/resilience.module';
import { SearchModule } from './modules/search/search.module'; import { SearchModule } from './modules/search/search.module';
import { AuditLogModule } from './modules/audit-log/audit-log.module'; import { AuditLogModule } from './modules/audit-log/audit-log.module';
import { MigrationModule } from './modules/migration/migration.module';
@Module({ @Module({
imports: [ imports: [
@@ -158,6 +159,7 @@ import { AuditLogModule } from './modules/audit-log/audit-log.module';
NotificationModule, NotificationModule,
DashboardModule, DashboardModule,
AuditLogModule, AuditLogModule,
MigrationModule,
], ],
controllers: [AppController], controllers: [AppController],
providers: [ providers: [

View File

@@ -64,11 +64,16 @@ describe('FileStorageService', () => {
attachmentRepo = module.get(getRepositoryToken(Attachment)); attachmentRepo = module.get(getRepositoryToken(Attachment));
jest.clearAllMocks(); jest.clearAllMocks();
(fs.ensureDirSync as jest.Mock).mockReturnValue(true); (fs.ensureDirSync as unknown as jest.Mock).mockReturnValue(true);
(fs.writeFile as jest.Mock).mockResolvedValue(undefined); (fs.writeFile as unknown as jest.Mock).mockResolvedValue(undefined);
(fs.pathExists as jest.Mock).mockResolvedValue(true); (fs.pathExists as unknown as jest.Mock).mockResolvedValue(true);
(fs.move as jest.Mock).mockResolvedValue(undefined); (fs.move as unknown as jest.Mock).mockResolvedValue(undefined);
(fs.remove as jest.Mock).mockResolvedValue(undefined); (fs.remove as unknown as jest.Mock).mockResolvedValue(undefined);
(fs.readFile as unknown as jest.Mock).mockResolvedValue(
Buffer.from('test')
);
(fs.stat as unknown as jest.Mock).mockResolvedValue({ size: 1024 });
(fs.ensureDir as unknown as jest.Mock).mockResolvedValue(undefined);
}); });
it('should be defined', () => { it('should be defined', () => {
@@ -86,7 +91,7 @@ describe('FileStorageService', () => {
}); });
it('should throw BadRequestException if write fails', async () => { it('should throw BadRequestException if write fails', async () => {
(fs.writeFile as jest.Mock).mockRejectedValueOnce( (fs.writeFile as unknown as jest.Mock).mockRejectedValueOnce(
new Error('Write error') new Error('Write error')
); );
await expect(service.upload(mockFile, 1)).rejects.toThrow( await expect(service.upload(mockFile, 1)).rejects.toThrow(

View File

@@ -201,6 +201,77 @@ export class FileStorageService {
return crypto.createHash('sha256').update(buffer).digest('hex'); return crypto.createHash('sha256').update(buffer).digest('hex');
} }
/**
 * ✅ NEW: Import Staging File (For Legacy Migration)
 * Moves a file from the staging_ai area directly into permanent storage
 * and persists an Attachment record pointing at the new location.
 *
 * @param sourceFilePath absolute path of the staged file to import
 * @param userId recorded as uploadedByUserId on the new Attachment
 * @param options issueDate drives the YYYY/MM storage folder (falls back to
 *                "now" when absent or invalid); documentType selects the
 *                top-level folder (defaults to 'General')
 * @returns the saved Attachment entity
 * @throws NotFoundException when the source file does not exist
 * @throws BadRequestException when the physical move fails
 */
async importStagingFile(
  sourceFilePath: string,
  userId: number,
  options?: { issueDate?: Date; documentType?: string }
): Promise<Attachment> {
  if (!(await fs.pathExists(sourceFilePath))) {
    this.logger.error(`Staging file not found: ${sourceFilePath}`);
    throw new NotFoundException(`Source file not found: ${sourceFilePath}`);
  }
  // 1. Get file stats & checksum
  const stats = await fs.stat(sourceFilePath);
  const fileExt = path.extname(sourceFilePath);
  const originalFilename = path.basename(sourceFilePath);
  // Stored name is a fresh UUID + original extension so names never collide.
  const storedFilename = `${uuidv4()}${fileExt}`;
  // Determine mime type basic — only .pdf and .xlsx are recognized here;
  // everything else is stored as a generic octet-stream.
  let mimeType = 'application/octet-stream';
  if (fileExt.toLowerCase() === '.pdf') mimeType = 'application/pdf';
  else if (fileExt.toLowerCase() === '.xlsx')
    mimeType =
      'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet';
  // Reads the whole file into memory to hash it — fine for typical migration
  // PDFs, but very large files will spike memory usage.
  const fileBuffer = await fs.readFile(sourceFilePath);
  const checksum = this.calculateChecksum(fileBuffer);
  // 2. Generate Permanent Path: {permanentDir}/{docType}/{YYYY}/{MM}
  const refDate = options?.issueDate || new Date();
  // Guard against an Invalid Date coming in from upstream parsing.
  const effectiveDate = isNaN(refDate.getTime()) ? new Date() : refDate;
  const year = effectiveDate.getFullYear().toString();
  const month = (effectiveDate.getMonth() + 1).toString().padStart(2, '0');
  const docTypeFolder = options?.documentType || 'General';
  const permanentDir = path.join(
    this.permanentDir,
    docTypeFolder,
    year,
    month
  );
  await fs.ensureDir(permanentDir);
  const newPath = path.join(permanentDir, storedFilename);
  // 3. Move File (overwrite: a previous partial import may have left a copy)
  try {
    await fs.move(sourceFilePath, newPath, { overwrite: true });
  } catch (error) {
    this.logger.error(`Failed to move staging file to ${newPath}`, error);
    throw new BadRequestException('Failed to process staging file');
  }
  // 4. Create Database Record
  // NOTE(review): if this save fails, the file has already been moved out of
  // staging, leaving an orphan in permanent storage — confirm cleanup policy.
  const attachment = this.attachmentRepository.create({
    originalFilename,
    storedFilename,
    filePath: newPath,
    mimeType,
    fileSize: stats.size,
    isTemporary: false,
    referenceDate: effectiveDate,
    checksum,
    uploadedByUserId: userId,
  });
  return this.attachmentRepository.save(attachment);
}
/** /**
* ✅ NEW: Delete File * ✅ NEW: Delete File
* ลบไฟล์ออกจาก Disk และ Database * ลบไฟล์ออกจาก Disk และ Database

View File

@@ -76,7 +76,7 @@ describe('CorrespondenceController', () => {
const createDto = { const createDto = {
projectId: 1, projectId: 1,
typeId: 1, typeId: 1,
title: 'Test Subject', subject: 'Test Subject',
}; };
const result = await controller.create( const result = await controller.create(

View File

@@ -41,9 +41,9 @@ describe('ManualOverrideService', () => {
originatorOrganizationId: 2, originatorOrganizationId: 2,
recipientOrganizationId: 3, recipientOrganizationId: 3,
correspondenceTypeId: 4, correspondenceTypeId: 4,
subTypeId: null, subTypeId: 5,
rfaTypeId: null, rfaTypeId: 6,
disciplineId: null, disciplineId: 7,
resetScope: 'YEAR_2024', resetScope: 'YEAR_2024',
newLastNumber: 999, newLastNumber: 999,
reason: 'System sync', reason: 'System sync',

View File

@@ -0,0 +1,44 @@
import {
IsString,
IsNotEmpty,
IsOptional,
IsNumber,
IsObject,
} from 'class-validator';
/**
 * Payload for importing one legacy correspondence record via the n8n
 * migration pipeline (POST /migration/import).
 */
export class ImportCorrespondenceDto {
  /** Legacy document number; used to find-or-create the Correspondence. */
  @IsString()
  @IsNotEmpty()
  document_number!: string;

  /** Document title; stored as the revision subject. */
  @IsString()
  @IsNotEmpty()
  title!: string;

  /** Must match an existing CorrespondenceType typeName or typeCode. */
  @IsString()
  @IsNotEmpty()
  category!: string;

  /** Absolute path of the staged file to import as an attachment. */
  @IsString()
  @IsNotEmpty()
  source_file_path!: string;

  /** AI classification confidence, if the validator produced one. */
  @IsNumber()
  @IsOptional()
  ai_confidence?: number;

  /** Free-form issues reported by the AI validator; `unknown` (not `any`)
   *  so consumers must narrow before use. */
  @IsOptional()
  ai_issues?: unknown;

  @IsString()
  @IsNotEmpty()
  migrated_by!: string; // "SYSTEM_IMPORT"

  /** Batch identifier, e.g. "migration_20260226". */
  @IsString()
  @IsNotEmpty()
  batch_id!: string;

  /** Extra legacy metadata merged into the revision's details JSON. */
  @IsObject()
  @IsOptional()
  details?: Record<string, unknown>;
}

View File

@@ -0,0 +1,29 @@
import {
Entity,
PrimaryGeneratedColumn,
Column,
CreateDateColumn,
Index,
} from 'typeorm';
/**
 * Ledger of Idempotency-Keys processed by the legacy-migration import
 * endpoint; a row here means the key has already been handled once.
 */
@Entity('import_transactions')
export class ImportTransaction {
  @PrimaryGeneratedColumn()
  id!: number;

  /** One row per Idempotency-Key. The named unique index is the backstop
   *  against concurrent duplicate imports. (Column-level `unique: true`
   *  removed — it duplicated the index declared just above.) */
  @Index('idx_idem_key', { unique: true })
  @Column({ name: 'idempotency_key', length: 255 })
  idempotencyKey!: string;

  /** Legacy document number this key imported; null for early failures. */
  @Column({ name: 'document_number', length: 100, nullable: true })
  documentNumber!: string | null;

  /** Migration batch the request belonged to; null when not supplied. */
  @Column({ name: 'batch_id', length: 100, nullable: true })
  batchId!: string | null;

  /** HTTP-style outcome: 201 = success, 500 = failed attempt. */
  @Column({ name: 'status_code', default: 201 })
  statusCode!: number;

  @CreateDateColumn({ name: 'created_at' })
  createdAt!: Date;
}

View File

@@ -0,0 +1,58 @@
import { Test, TestingModule } from '@nestjs/testing';
import { MigrationController } from './migration.controller';
import { MigrationService } from './migration.service';
import { ImportCorrespondenceDto } from './dto/import-correspondence.dto';
describe('MigrationController', () => {
  let controller: MigrationController;
  let service: MigrationService;

  beforeEach(async () => {
    // Stub only the single service method the controller delegates to.
    const migrationServiceStub = {
      importCorrespondence: jest.fn().mockResolvedValue({ message: 'Success' }),
    };

    const testModule: TestingModule = await Test.createTestingModule({
      controllers: [MigrationController],
      providers: [{ provide: MigrationService, useValue: migrationServiceStub }],
    }).compile();

    controller = testModule.get<MigrationController>(MigrationController);
    service = testModule.get<MigrationService>(MigrationService);
  });

  it('should be defined', () => {
    expect(controller).toBeDefined();
  });

  it('should call importCorrespondence on service', async () => {
    const payload: ImportCorrespondenceDto = {
      document_number: 'DOC-001',
      title: 'Legacy Record',
      category: 'Correspondence',
      source_file_path: '/staging_ai/test.pdf',
      migrated_by: 'SYSTEM_IMPORT',
      batch_id: 'batch1',
    };
    const headerKey = 'key123';
    const requestUser = { userId: 5 };

    const response = await controller.importCorrespondence(
      payload,
      headerKey,
      requestUser
    );

    // The controller passes the DTO and key through and unwraps the user id.
    expect(response).toEqual({ message: 'Success' });
    expect(service.importCorrespondence).toHaveBeenCalledWith(
      payload,
      headerKey,
      5
    );
  });
});

View File

@@ -0,0 +1,30 @@
import { Controller, Post, Body, Headers, UseGuards } from '@nestjs/common';
import { MigrationService } from './migration.service';
import { ImportCorrespondenceDto } from './dto/import-correspondence.dto';
import { JwtAuthGuard } from '../../common/guards/jwt-auth.guard';
import { CurrentUser } from '../../common/decorators/current-user.decorator';
import { ApiTags, ApiOperation, ApiBearerAuth, ApiHeader } from '@nestjs/swagger';
@ApiTags('Migration')
@ApiBearerAuth()
@Controller('migration')
export class MigrationController {
  constructor(private readonly migrationService: MigrationService) {}

  /**
   * Import one legacy correspondence record (called by the n8n pipeline).
   * Requires a JWT plus an Idempotency-Key header so retries are safe.
   */
  @Post('import')
  @UseGuards(JwtAuthGuard)
  @ApiOperation({ summary: 'Import generic legacy correspondence record via n8n integration' })
  @ApiHeader({
    name: 'Idempotency-Key',
    description: 'Unique key per document and batch to prevent duplicate inserts',
    required: true,
  })
  async importCorrespondence(
    @Body() dto: ImportCorrespondenceDto,
    @Headers('idempotency-key') idempotencyKey: string,
    @CurrentUser() user: { id?: number; userId?: number }
  ) {
    // `??` instead of `||` so a falsy-but-present id is never discarded.
    // TODO(review): 5 is presumably the migration bot's user id — move this
    // fallback into configuration instead of hard-coding it here.
    const userId = user?.id ?? user?.userId ?? 5;
    return this.migrationService.importCorrespondence(dto, idempotencyKey, userId);
  }
}

View File

@@ -0,0 +1,28 @@
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { MigrationController } from './migration.controller';
import { MigrationService } from './migration.service';
import { ImportTransaction } from './entities/import-transaction.entity';
import { Correspondence } from '../correspondence/entities/correspondence.entity';
import { CorrespondenceRevision } from '../correspondence/entities/correspondence-revision.entity';
import { CorrespondenceType } from '../correspondence/entities/correspondence-type.entity';
import { CorrespondenceStatus } from '../correspondence/entities/correspondence-status.entity';
import { Project } from '../project/entities/project.entity';
// Import any other required modules for JwtAuthGuard (usually AuthModule or similar, but global guards handle this mostly)
/**
 * Wires the legacy-data migration endpoint (n8n → POST /migration/import)
 * and registers the entities MigrationService reads/writes in this
 * feature's TypeORM scope.
 *
 * NOTE(review): MigrationService also injects FileStorageService, which is
 * not provided or imported here — confirm its providing module is @Global(),
 * otherwise Nest will fail dependency resolution at startup.
 */
@Module({
  imports: [
    TypeOrmModule.forFeature([
      ImportTransaction,
      Correspondence,
      CorrespondenceRevision,
      CorrespondenceType,
      CorrespondenceStatus,
      Project,
    ]),
  ],
  controllers: [MigrationController],
  providers: [MigrationService],
  exports: [MigrationService],
})
export class MigrationModule {}

View File

@@ -0,0 +1,83 @@
import { Test, TestingModule } from '@nestjs/testing';
import { getRepositoryToken } from '@nestjs/typeorm';
import { DataSource } from 'typeorm';
import { FileStorageService } from '../../common/file-storage/file-storage.service';
import { MigrationService } from './migration.service';
import { ImportTransaction } from './entities/import-transaction.entity';
import { CorrespondenceType } from '../correspondence/entities/correspondence-type.entity';
import { CorrespondenceStatus } from '../correspondence/entities/correspondence-status.entity';
import { Project } from '../project/entities/project.entity';
describe('MigrationService', () => {
let service: MigrationService;
const mockTransactionRepo = {
findOne: jest.fn(),
create: jest.fn(),
save: jest.fn(),
};
const mockTypeRepo = {
findOne: jest.fn(),
};
const mockStatusRepo = {
findOne: jest.fn(),
};
const mockProjectRepo = {
findOne: jest.fn(),
};
const mockQueryRunner = {
connect: jest.fn(),
startTransaction: jest.fn(),
commitTransaction: jest.fn(),
rollbackTransaction: jest.fn(),
release: jest.fn(),
manager: {
findOne: jest.fn(),
create: jest.fn(),
save: jest.fn(),
count: jest.fn(),
update: jest.fn(),
},
};
const mockDataSource = {
createQueryRunner: jest.fn().mockReturnValue(mockQueryRunner),
};
beforeEach(async () => {
const module: TestingModule = await Test.createTestingModule({
providers: [
MigrationService,
{
provide: getRepositoryToken(ImportTransaction),
useValue: mockTransactionRepo,
},
{
provide: getRepositoryToken(CorrespondenceType),
useValue: mockTypeRepo,
},
{
provide: getRepositoryToken(CorrespondenceStatus),
useValue: mockStatusRepo,
},
{
provide: getRepositoryToken(Project),
useValue: mockProjectRepo,
},
{
provide: DataSource,
useValue: mockDataSource,
},
],
}).compile();
service = module.get<MigrationService>(MigrationService);
});
it('should be defined', () => {
expect(service).toBeDefined();
});
});

View File

@@ -0,0 +1,244 @@
import {
Injectable,
Logger,
ConflictException,
BadRequestException,
InternalServerErrorException,
} from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository, DataSource } from 'typeorm';
import { ImportCorrespondenceDto } from './dto/import-correspondence.dto';
import { ImportTransaction } from './entities/import-transaction.entity';
import { Correspondence } from '../correspondence/entities/correspondence.entity';
import { CorrespondenceRevision } from '../correspondence/entities/correspondence-revision.entity';
import { CorrespondenceType } from '../correspondence/entities/correspondence-type.entity';
import { CorrespondenceStatus } from '../correspondence/entities/correspondence-status.entity';
import { Project } from '../project/entities/project.entity';
import { FileStorageService } from '../../common/file-storage/file-storage.service';
@Injectable()
export class MigrationService {
  private readonly logger = new Logger(MigrationService.name);

  constructor(
    private readonly dataSource: DataSource,
    @InjectRepository(ImportTransaction)
    private readonly importTransactionRepo: Repository<ImportTransaction>,
    @InjectRepository(CorrespondenceType)
    private readonly correspondenceTypeRepo: Repository<CorrespondenceType>,
    @InjectRepository(CorrespondenceStatus)
    private readonly correspondenceStatusRepo: Repository<CorrespondenceStatus>,
    @InjectRepository(Project)
    private readonly projectRepo: Repository<Project>,
    private readonly fileStorageService: FileStorageService
  ) {}

  /**
   * Imports a single legacy correspondence record pushed by the n8n
   * migration pipeline.
   *
   * Flow: idempotency check → resolve type/status/project → (inside one DB
   * transaction) find-or-create the Correspondence, import the staged file,
   * append a new current revision, and record the Idempotency-Key.
   *
   * @param dto            validated payload from POST /migration/import
   * @param idempotencyKey value of the Idempotency-Key header (required)
   * @param userId         user id recorded as creator/uploader
   * @throws BadRequestException  missing key or unknown category
   * @throws ConflictException    key was seen before but did not succeed
   * @throws InternalServerErrorException  missing seed data or any DB failure
   */
  async importCorrespondence(
    dto: ImportCorrespondenceDto,
    idempotencyKey: string,
    userId: number
  ) {
    if (!idempotencyKey) {
      throw new BadRequestException('Idempotency-Key header is required');
    }
    // 1. Idempotency Check — replaying a successful key returns the cached
    // outcome; a previously FAILED key is refused outright, so retrying a
    // transient failure requires a new key.
    // NOTE(review): check-then-insert is racy under concurrency; the unique
    // index on idempotency_key is the actual backstop (see step 6).
    const existingTransaction = await this.importTransactionRepo.findOne({
      where: { idempotencyKey },
    });
    if (existingTransaction) {
      if (existingTransaction.statusCode === 201) {
        this.logger.log(
          `Idempotency key ${idempotencyKey} already processed. Returning cached success.`
        );
        return {
          message: 'Already processed',
          transaction: existingTransaction,
        };
      } else {
        throw new ConflictException(
          `Transaction failed previously with status ${existingTransaction.statusCode}`
        );
      }
    }
    // 2. Fetch Dependencies
    const type = await this.correspondenceTypeRepo.findOne({
      where: { typeName: dto.category },
    });
    // If exact name isn't found, try typeCode just in case
    const typeId = type
      ? type.id
      : (
          await this.correspondenceTypeRepo.findOne({
            where: { typeCode: dto.category },
          })
        )?.id;
    if (!typeId) {
      throw new BadRequestException(
        `Category "${dto.category}" not found in system.`
      );
    }
    // Migrated documents typically end up as 'Closed by Owner' or a similar
    // terminal state, unless specifically pending. For legacy, use the
    // terminal status 'CLBOWN' if available; otherwise fall back to 'DRAFT'.
    let status = await this.correspondenceStatusRepo.findOne({
      where: { statusCode: 'CLBOWN' },
    });
    if (!status) {
      status = await this.correspondenceStatusRepo.findOne({
        where: { statusCode: 'DRAFT' },
      });
    }
    if (!status) {
      throw new InternalServerErrorException(
        'CRITICAL: No default correspondence status found (missing CLBOWN/DRAFT)'
      );
    }
    // We assume migration runs for the LCBP3 project only.
    const project = await this.projectRepo.findOne({
      where: { projectCode: 'LCBP3' },
    });
    if (!project) {
      throw new InternalServerErrorException(
        'Project LCBP3 not found in database'
      );
    }
    const queryRunner = this.dataSource.createQueryRunner();
    await queryRunner.connect();
    await queryRunner.startTransaction();
    try {
      // 3. Find or Create Correspondence (looked up by number within project)
      let correspondence = await queryRunner.manager.findOne(Correspondence, {
        where: {
          correspondenceNumber: dto.document_number,
          projectId: project.id,
        },
      });
      if (!correspondence) {
        correspondence = queryRunner.manager.create(Correspondence, {
          correspondenceNumber: dto.document_number,
          correspondenceTypeId: typeId,
          projectId: project.id,
          isInternal: false,
          createdBy: userId,
        });
        await queryRunner.manager.save(correspondence);
      }
      // 4. File Handling — map the staged source file to an Attachment via
      // FileStorageService. A file failure is logged and tolerated: the
      // record is still imported, just without an attachment.
      // NOTE(review): importStagingFile physically moves the file right away;
      // if the DB transaction rolls back later, the file is NOT restored to
      // staging — confirm this is acceptable for re-runs.
      let attachmentId: number | null = null;
      if (dto.source_file_path) {
        try {
          const attachment = await this.fileStorageService.importStagingFile(
            dto.source_file_path,
            userId,
            { documentType: dto.category } // use category from DTO directly
          );
          attachmentId = attachment.id;
        } catch (fileError: unknown) {
          const errMsg =
            fileError instanceof Error ? fileError.message : String(fileError);
          this.logger.warn(
            `Failed to import file for [${dto.document_number}], continuing without attachment: ${errMsg}`
          );
        }
      }
      // 5. Create Revision
      const revisionCount = await queryRunner.manager.count(
        CorrespondenceRevision,
        {
          where: { correspondenceId: correspondence.id },
        }
      );
      // Determine revision number. Supports mapping multiple batches to the
      // same document number by incrementing the revision each time.
      const revNum = revisionCount;
      const revision = queryRunner.manager.create(CorrespondenceRevision, {
        correspondenceId: correspondence.id,
        revisionNumber: revNum,
        revisionLabel: revNum === 0 ? '0' : revNum.toString(),
        isCurrent: true,
        statusId: status.id,
        subject: dto.title,
        description: 'Migrated from legacy system via Auto Ingest',
        details: {
          ...dto.details,
          ai_confidence: dto.ai_confidence,
          ai_issues: dto.ai_issues as unknown,
          source_file_path: dto.source_file_path,
          attachment_id: attachmentId, // Link attachment ID if successful
        },
        schemaVersion: 1,
        createdBy: userId, // Bot ID
      });
      // Demote the previous current revision before saving the new one.
      if (revisionCount > 0) {
        await queryRunner.manager.update(
          CorrespondenceRevision,
          { correspondenceId: correspondence.id, isCurrent: true },
          { isCurrent: false }
        );
      }
      await queryRunner.manager.save(revision);
      // 6. Track Transaction — a concurrent duplicate key fails here on the
      // unique index, rolling the whole import back.
      const transaction = queryRunner.manager.create(ImportTransaction, {
        idempotencyKey,
        documentNumber: dto.document_number,
        batchId: dto.batch_id,
        statusCode: 201,
      });
      await queryRunner.manager.save(transaction);
      await queryRunner.commitTransaction();
      this.logger.log(
        `Ingested document [${dto.document_number}] successfully (Batch: ${dto.batch_id})`
      );
      return {
        message: 'Import successful',
        correspondenceId: correspondence.id,
        revisionId: revision.id,
        transactionId: transaction.id,
      };
    } catch (error: unknown) {
      await queryRunner.rollbackTransaction();
      const errorMessage =
        error instanceof Error ? error.message : String(error);
      const errorStack = error instanceof Error ? error.stack : undefined;
      this.logger.error(
        `Import failed for document [${dto.document_number}]: ${errorMessage}`,
        errorStack
      );
      // Record the failure OUTSIDE the rolled-back transaction so the key is
      // marked as seen; the save error is swallowed deliberately (the key row
      // may already exist, e.g. after a concurrent success).
      const failedTransaction = this.importTransactionRepo.create({
        idempotencyKey,
        documentNumber: dto.document_number,
        batchId: dto.batch_id,
        statusCode: 500,
      });
      await this.importTransactionRepo.save(failedTransaction).catch(() => {});
      throw new InternalServerErrorException(
        'Migration import failed: ' + errorMessage
      );
    } finally {
      await queryRunner.release();
    }
  }
}

View File

@@ -46,7 +46,7 @@ describe('ProjectController', () => {
const mockResult = { data: [], meta: {} }; const mockResult = { data: [], meta: {} };
(mockProjectService.findAll as jest.Mock).mockResolvedValue(mockResult); (mockProjectService.findAll as jest.Mock).mockResolvedValue(mockResult);
const result = await controller.findAll({}); const result = await controller.findAll({ page: 1, limit: 10 });
expect(mockProjectService.findAll).toHaveBeenCalled(); expect(mockProjectService.findAll).toHaveBeenCalled();
}); });

View File

@@ -12,7 +12,7 @@ describe('WorkflowDslParser', () => {
beforeEach(async () => { beforeEach(async () => {
mockRepository = { mockRepository = {
save: jest.fn((def) => Promise.resolve(def)), save: jest.fn((def) => Promise.resolve(def)) as unknown as jest.Mock,
findOne: jest.fn(), findOne: jest.fn(),
}; };
@@ -160,7 +160,7 @@ describe('WorkflowDslParser', () => {
expect(result.valid).toBe(false); expect(result.valid).toBe(false);
expect(result.errors).toBeDefined(); expect(result.errors).toBeDefined();
expect(result.errors.length).toBeGreaterThan(0); expect(result.errors?.length).toBeGreaterThan(0);
}); });
}); });
@@ -184,7 +184,7 @@ describe('WorkflowDslParser', () => {
it('should throw error if definition not found', async () => { it('should throw error if definition not found', async () => {
mockRepository.findOne = jest.fn().mockResolvedValue(null); mockRepository.findOne = jest.fn().mockResolvedValue(null);
await expect(parser.getParsedDsl(999)).rejects.toThrow( await expect(parser.getParsedDsl('999')).rejects.toThrow(
BadRequestException BadRequestException
); );
}); });

File diff suppressed because it is too large Load Diff

View File

@@ -4,7 +4,7 @@
| ------------------------------------------------------------------ | ------- | | ------------------------------------------------------------------ | ------- |
| legacy PDF document migration to system v1.8.0 uses n8n and Ollama | 1.8.0 | | legacy PDF document migration to system v1.8.0 uses n8n and Ollama | 1.8.0 |
> **Note:** Category Enum system-driven, Idempotency Contract, Duplicate Handling Clarification, Storage Enforcement, Audit Log Enhancement, Review Queue Integration, Revision Drift Protection, Execution Time, Encoding Normalization, Security Hardening, AI Physical Isolation (ASUSTOR), Folder Standard (/data/dms) > **Note:** Category Enum system-driven, Idempotency Contract, Duplicate Handling Clarification, Storage Enforcement, Audit Log Enhancement, Review Queue Integration, Revision Drift Protection, Execution Time, Encoding Normalization, Security Hardening, Orchestrator on QNAP, AI Physical Isolation (Desktop Desk-5439), Folder Standard (/share/np-dms/n8n)
--- ---
@@ -21,8 +21,8 @@
## 2. โครงสร้างพื้นฐาน (Migration Infrastructure) ## 2. โครงสร้างพื้นฐาน (Migration Infrastructure)
- **Migration Orchestrator:** n8n (รันจาก Docker Container บน ASUSTOR NAS) - **Migration Orchestrator:** n8n (รันจาก Docker Container บน QNAP NAS)
- **AI Validator:** Ollama (รันใน Internal Network บน ASUSTOR NAS) - **AI Validator:** Ollama (รันใน Internal Network บน Desktop Desk-5439, RTX 2060 SUPER 8GB)
- **Target Database:** MariaDB (`correspondences` table) บน QNAP NAS - **Target Database:** MariaDB (`correspondences` table) บน QNAP NAS
- **Target Storage:** QNAP File System — **ผ่าน Backend StorageService API เท่านั้น** (ห้าม move file โดยตรง) - **Target Storage:** QNAP File System — **ผ่าน Backend StorageService API เท่านั้น** (ห้าม move file โดยตรง)
- **Connection:** 2.5G LAN + LACP / Internal VLAN - **Connection:** 2.5G LAN + LACP / Internal VLAN
@@ -35,18 +35,18 @@
**File Migration:** **File Migration:**
- ย้ายไฟล์ PDF ทั้งหมดจากแหล่งเก็บไปยัง Folder ชั่วคราวบน NAS (QNAP) - ย้ายไฟล์ PDF ทั้งหมดจากแหล่งเก็บไปยัง Folder ชั่วคราวบน NAS (QNAP)
- Target Path: `/data/dms/staging_ai/` - Target Path: `/share/np-dms/staging_ai/`
**Mount Folder:** **Mount Folder:**
- Bind Mount `/data/dms/staging_ai/` เข้ากับ n8n Container แบบ **read-only** - Bind Mount `/share/np-dms/staging_ai/` เข้ากับ n8n Container แบบ **read-only**
- สร้าง `/data/dms/migration_logs/` Volume แยกสำหรับเขียน Log แบบ **read-write** - สร้าง `/share/np-dms/n8n/migration_logs/` Volume แยกสำหรับเขียน Log แบบ **read-write**
**Ollama Config:** **Ollama Config:**
- ติดตั้ง Ollama บน ASUSTOR NAS - ติดตั้ง Ollama บน Desktop (Desk-5439, RTX 2060 SUPER 8GB)
- No DB credentials, Internal network only - No DB credentials, Internal network only
```bash ```bash
# แนะนำ: llama3.2:3b (เร็ว, VRAM ~3GB, เหมาะ Classification) # แนะนำ: llama3.2:3b (เร็ว, VRAM ~3GB, เหมาะ Classification) หรือ ollama run llama3.2:3b
ollama pull llama3.2:3b ollama pull llama3.2:3b
# Fallback: mistral:7b-instruct-q4_K_M (แม่นกว่า, VRAM ~5GB) # Fallback: mistral:7b-instruct-q4_K_M (แม่นกว่า, VRAM ~5GB)
@@ -55,7 +55,7 @@ ollama pull llama3.2:3b
**ทดสอบ Ollama:** **ทดสอบ Ollama:**
```bash ```bash
curl http://<OLLAMA_HOST>:11434/api/generate \ curl http://192.168.20.100:11434/api/generate \
-d '{"model":"llama3.2:3b","prompt":"reply: ok","stream":false}' -d '{"model":"llama3.2:3b","prompt":"reply: ok","stream":false}'
``` ```
@@ -165,7 +165,9 @@ return items.map(item => ({
json: { json: {
...item.json, ...item.json,
document_number: normalize(item.json.document_number), document_number: normalize(item.json.document_number),
title: normalize(item.json.title) title: normalize(item.json.title),
// Mapping เลขอ้างอิงเก่า (Legacy Number) เพื่อนำไปเก็บใน details JSON
legacy_document_number: item.json.document_number
} }
})); }));
``` ```
@@ -174,7 +176,7 @@ return items.map(item => ({
- ตรวจสอบไฟล์ PDF มีอยู่จริงบน NAS - ตรวจสอบไฟล์ PDF มีอยู่จริงบน NAS
- Normalize ชื่อไฟล์เป็น **UTF-8 NFC** - Normalize ชื่อไฟล์เป็น **UTF-8 NFC**
- Path Traversal Guard: resolved path ต้องอยู่ใน `/data/dms/staging_ai` เท่านั้น - Path Traversal Guard: resolved path ต้องอยู่ใน `/share/np-dms/staging_ai` เท่านั้น
- **Output 0** → valid → Node 3 - **Output 0** → valid → Node 3
- **Output 1** → error → Node 5D (ไม่หายเงียบ) - **Output 1** → error → Node 5D (ไม่หายเงียบ)
@@ -246,7 +248,7 @@ if (item.json.excel_revision !== undefined) {
#### Node 5A: Auto Ingest — Backend API #### Node 5A: Auto Ingest — Backend API
> ⚠️ **Storage Enforcement:** n8n ส่งแค่ `source_file_path` — Backend จะ generate UUID, enforce path strategy (`/data/dms/uploads/YYYY/MM/{uuid}.pdf`), และ move file atomically ผ่าน StorageService > ⚠️ **Storage Enforcement:** n8n ส่งแค่ `source_file_path` — Backend จะ generate UUID, enforce path strategy (`/share/np-dms/staging_ai/...`), และ move file atomically ผ่าน StorageService
```http ```http
POST /api/correspondences/import POST /api/correspondences/import
@@ -303,9 +305,9 @@ Review → Admin Approve → POST /api/correspondences/import (เหมือ
Admin Reject → ลบออกจาก queue ไม่สร้าง record Admin Reject → ลบออกจาก queue ไม่สร้าง record
``` ```
#### Node 5C: Reject Log → `/data/dms/migration_logs/reject_log.csv` #### Node 5C: Reject Log → `/share/np-dms/n8n/migration_logs/reject_log.csv`
#### Node 5D: Error Log → `/data/dms/migration_logs/error_log.csv` + MariaDB #### Node 5D: Error Log → `/share/np-dms/n8n/migration_logs/error_log.csv` + MariaDB
--- ---
@@ -370,7 +372,7 @@ SELECT ROW_COUNT();
COMMIT; COMMIT;
``` ```
**Step 3:** ย้ายไฟล์กลับ `/data/dms/staging_ai/` ผ่าน Script แยก **Step 3:** ย้ายไฟล์กลับ `/share/np-dms/staging_ai/` ผ่าน Script แยก
**Step 4:** Reset State **Step 4:** Reset State
```sql ```sql
@@ -424,4 +426,4 @@ GROUP BY idempotency_key HAVING COUNT(*) > 1;
--- ---
> **ข้อแนะนำด้าน Physical Storage:** ไฟล์ PDF ทั้ง 20,000 ไฟล์จะถูก move โดย Backend StorageService ไปยัง path ที่ถูกต้องโดยอัตโนมัติ ไม่ปล่อยค้างไว้ที่ `/data/dms/staging_ai/` > **ข้อแนะนำด้าน Physical Storage:** ไฟล์ PDF ทั้ง 20,000 ไฟล์จะถูก move โดย Backend StorageService ไปยัง path ที่ถูกต้องโดยอัตโนมัติ ไม่ปล่อยค้างไว้ที่ `/share/np-dms/staging_ai/`

View File

@@ -2,77 +2,103 @@
เอกสารนี้จัดทำขึ้นเพื่อรองรับการ Migration เอกสาร PDF 20,000 ฉบับ ตามแผนใน `03-04-legacy-data-migration.md` และ `ADR-017-ollama-data-migration.md` เอกสารนี้จัดทำขึ้นเพื่อรองรับการ Migration เอกสาร PDF 20,000 ฉบับ ตามแผนใน `03-04-legacy-data-migration.md` และ `ADR-017-ollama-data-migration.md`
> **Note:** Category Enum system-driven, Idempotency-Key Header, Storage Enforcement, Audit Log, Encoding Normalization, Security Hardening, Nginx Rate Limit, Docker Hardening, AI Physical Isolation (ASUSTOR), Folder Standard (/data/dms) > **Note:** Category Enum system-driven, Idempotency-Key Header, Storage Enforcement, Audit Log, Encoding Normalization, Security Hardening, Nginx Rate Limit, Docker Hardening, Orchestrator on QNAP, AI Physical Isolation (Desktop Desk-5439), Folder Standard (/share/np-dms/n8n)
--- ---
## 📌 ส่วนที่ 1: การติดตั้งและตั้งค่าเบื้องต้น ## 📌 ส่วนที่ 1: การติดตั้งและตั้งค่าเบื้องต้น
### 1.1 ติดตั้ง n8n บน ASUSTOR NAS (Docker) ### 1.1 ปรับปรุง n8n บน QNAP NAS (Docker)
```bash คุณสามารถเพิ่ม PostgreSQL Service เข้าไปใน `docker-compose-lcbp3-n8n.yml` ปัจจุบันบน QNAP NAS ได้ดังนี้:
mkdir -p /data/dms/n8n
cd /data/dms/n8n
cat > docker-compose.yml << 'EOF' ```yaml
version: '3.8' version: '3.8'
x-restart: &restart_policy
restart: unless-stopped
x-logging: &default_logging
logging:
driver: "json-file"
options:
max-size: "10m"
max-file: "5"
services: services:
n8n-db:
<<: [*restart_policy, *default_logging]
image: postgres:16-alpine
container_name: n8n-db
environment:
- POSTGRES_USER=n8n
- POSTGRES_PASSWORD=<strong_password>
- POSTGRES_DB=n8n
volumes:
- "/share/np-dms/n8n/postgres-data:/var/lib/postgresql/data"
networks:
lcbp3: {}
healthcheck:
test: ['CMD-SHELL', 'pg_isready -h localhost -U n8n -d n8n']
interval: 10s
timeout: 5s
retries: 5
n8n: n8n:
image: n8nio/n8n:latest <<: [*restart_policy, *default_logging]
container_name: n8n-migration image: n8nio/n8n:1.78.0
restart: unless-stopped container_name: n8n
# Docker Hardening (Patch) depends_on:
mem_limit: 2g n8n-db:
logging: condition: service_healthy
driver: json-file deploy:
options: resources:
max-size: "10m" limits:
max-file: "3" cpus: "1.5"
memory: 2G
environment:
TZ: "Asia/Bangkok"
NODE_ENV: "production"
N8N_PUBLIC_URL: "https://n8n.np-dms.work/"
WEBHOOK_URL: "https://n8n.np-dms.work/"
N8N_EDITOR_BASE_URL: "https://n8n.np-dms.work/"
N8N_PROTOCOL: "https"
N8N_HOST: "n8n.np-dms.work"
N8N_PORT: 5678
N8N_PROXY_HOPS: "1"
N8N_DIAGNOSTICS_ENABLED: 'false'
N8N_SECURE_COOKIE: 'true'
      N8N_ENCRYPTION_KEY: "<strong_encryption_key>" # never commit the real key; rotate it if it was ever published
N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS: 'true'
GENERIC_TIMEZONE: "Asia/Bangkok"
# DB Setup
DB_TYPE: postgresdb
DB_POSTGRESDB_DATABASE: n8n
DB_POSTGRESDB_HOST: n8n-db
DB_POSTGRESDB_PORT: 5432
DB_POSTGRESDB_USER: n8n
DB_POSTGRESDB_PASSWORD: <strong_password>
# Data Prune
EXECUTIONS_DATA_PRUNE: 'true'
EXECUTIONS_DATA_MAX_AGE: 168
EXECUTIONS_DATA_PRUNE_TIMEOUT: 60
ports: ports:
- "5678:5678" - "5678:5678"
environment:
- N8N_HOST=0.0.0.0
- N8N_PORT=5678
- N8N_PROTOCOL=http
- NODE_ENV=production
- WEBHOOK_URL=http://<NAS_IP>:5678/
- GENERIC_TIMEZONE=Asia/Bangkok
- TZ=Asia/Bangkok
- N8N_SECURE_COOKIE=false
- N8N_USER_FOLDER=/home/node/.n8n
- N8N_PUBLIC_API_DISABLED=true
- N8N_BASIC_AUTH_ACTIVE=true
- N8N_BASIC_AUTH_USER=admin
- N8N_BASIC_AUTH_PASSWORD=<strong_password>
- N8N_PAYLOAD_SIZE_MAX=10485760
- EXECUTIONS_DATA_PRUNE=true
- EXECUTIONS_DATA_MAX_AGE=168
- EXECUTIONS_DATA_PRUNE_TIMEOUT=60
- DB_TYPE=postgresdb
- DB_POSTGRESDB_HOST=<DB_IP>
- DB_POSTGRESDB_PORT=5432
- DB_POSTGRESDB_DATABASE=n8n
- DB_POSTGRESDB_USER=n8n
- DB_POSTGRESDB_PASSWORD=<password>
volumes:
- ./n8n_data:/home/node/.n8n
# read-only: อ่านไฟล์ PDF ต้นฉบับเท่านั้น
- /data/dms/staging_ai:/data/dms/staging_ai:ro
# read-write: เขียน Log และ CSV ทั้งหมด
- /data/dms/migration_logs:/data/dms/migration_logs:rw
networks: networks:
- n8n-network lcbp3: {}
volumes:
networks: - "/share/np-dms/n8n:/home/node/.n8n"
n8n-network: - "/share/np-dms/n8n/cache:/home/node/.cache"
driver: bridge - "/share/np-dms/n8n/scripts:/scripts"
EOF - "/share/np-dms/n8n/data:/data"
      - "/var/run/docker.sock:/var/run/docker.sock" # ⚠️ grants the container root-equivalent control of the Docker host — remove unless a workflow truly needs it
docker-compose up -d # read-only: อ่านไฟล์ PDF ต้นฉบับเท่านั้น
- "/share/np-dms/staging_ai:/share/np-dms/staging_ai:ro"
# read-write: เขียน Log และ CSV ทั้งหมด
- "/share/np-dms/n8n/migration_logs:/share/np-dms/n8n/migration_logs:rw"
``` ```
> ⚠️ **Volume หมายเหตุ:** `/data/dms/staging_ai` = **read-only** (อ่านไฟล์ต้นฉบับ) และ `/data/dms/migration_logs` = **read-write** (เขียน Log/CSV) — ห้ามเขียน CSV ลง `staging_ai` เพราะจะ Error ทันที > ⚠️ **Volume หมายเหตุ:** `/share/np-dms/staging_ai` = **read-only** (อ่านไฟล์ต้นฉบับ) และ `/share/np-dms/n8n/migration_logs` = **read-write** (เขียน Log/CSV) — ห้ามเขียน CSV ลง `staging_ai` เพราะจะ Error ทันที
### 1.2 Nginx Rate Limit ### 1.2 Nginx Rate Limit
@@ -92,19 +118,19 @@ location /api/correspondences/import {
**Settings → Environment Variables ใน n8n UI:** **Settings → Environment Variables ใน n8n UI:**
| Variable | ค่าที่แนะนำ | คำอธิบาย | | Variable | ค่าที่แนะนำ | คำอธิบาย |
| --------------------------- | ---------------------------- | ------------------------------------ | | --------------------------- | ----------------------------- | ---------------------------------- |
| `OLLAMA_HOST` | `http://<ASUSTOR_IP>:11434` | URL ของ Ollama (ใน internal network) | | `OLLAMA_HOST` | `http://192.168.20.100:11434` | URL ของ Ollama (Desktop Desk-5439) |
| `OLLAMA_MODEL_PRIMARY` | `llama3.2:3b` | Model หลัก | | `OLLAMA_MODEL_PRIMARY` | `llama3.2:3b` | Model หลัก |
| `OLLAMA_MODEL_FALLBACK` | `mistral:7b-instruct-q4_K_M` | Model สำรอง | | `OLLAMA_MODEL_FALLBACK` | `mistral:7b-instruct-q4_K_M` | Model สำรอง |
| `MIGRATION_BATCH_SIZE` | `10` | จำนวน Record ต่อ Batch | | `MIGRATION_BATCH_SIZE` | `10` | จำนวน Record ต่อ Batch |
| `MIGRATION_DELAY_MS` | `2000` | Delay ระหว่าง Request (ms) | | `MIGRATION_DELAY_MS` | `2000` | Delay ระหว่าง Request (ms) |
| `CONFIDENCE_THRESHOLD_HIGH` | `0.85` | Threshold Auto Ingest | | `CONFIDENCE_THRESHOLD_HIGH` | `0.85` | Threshold Auto Ingest |
| `CONFIDENCE_THRESHOLD_LOW` | `0.60` | Threshold Review Queue | | `CONFIDENCE_THRESHOLD_LOW` | `0.60` | Threshold Review Queue |
| `MAX_RETRY_COUNT` | `3` | จำนวนครั้ง Retry | | `MAX_RETRY_COUNT` | `3` | จำนวนครั้ง Retry |
| `FALLBACK_ERROR_THRESHOLD` | `5` | Error ที่ trigger Fallback | | `FALLBACK_ERROR_THRESHOLD` | `5` | Error ที่ trigger Fallback |
| `BACKEND_URL` | `https://<BACKEND_URL>` | URL ของ LCBP3 Backend | | `BACKEND_URL` | `https://<BACKEND_URL>` | URL ของ LCBP3 Backend |
| `MIGRATION_BATCH_ID` | `migration_20260226` | ID ของ Batch | | `MIGRATION_BATCH_ID` | `migration_20260226` | ID ของ Batch |
--- ---
@@ -193,12 +219,12 @@ CREATE TABLE IF NOT EXISTS migration_daily_summary (
**Credentials → Add New:** **Credentials → Add New:**
#### 🔐 Ollama API #### 🔐 Ollama API
| Field | ค่า | | Field | ค่า |
| -------------- | --------------------------- | | -------------- | ----------------------------- |
| Name | `Ollama Local API` | | Name | `Ollama Local API` |
| Type | `HTTP Request` | | Type | `HTTP Request` |
| Base URL | `http://<ASUSTOR_IP>:11434` | | Base URL | `http://192.168.20.100:11434` |
| Authentication | `None` | | Authentication | `None` |
#### 🔐 LCBP3 Backend API #### 🔐 LCBP3 Backend API
| Field | ค่า | | Field | ค่า |
@@ -306,9 +332,9 @@ $workflow.variables.system_categories = categories;
// ตรวจ File Mount // ตรวจ File Mount
try { try {
const files = fs.readdirSync('/data/dms/staging_ai'); const files = fs.readdirSync('/share/np-dms/staging_ai');
if (files.length === 0) throw new Error('staging_ai is empty'); if (files.length === 0) throw new Error('staging_ai is empty');
fs.writeFileSync('/data/dms/migration_logs/.preflight_ok', new Date().toISOString()); fs.writeFileSync('/share/np-dms/n8n/migration_logs/.preflight_ok', new Date().toISOString());
} catch (err) { } catch (err) {
throw new Error(`File mount check failed: ${err.message}`); throw new Error(`File mount check failed: ${err.message}`);
} }
@@ -382,9 +408,9 @@ for (const item of items) {
const safeName = path.basename( const safeName = path.basename(
String(docNumber).replace(/[^a-zA-Z0-9\-_.]/g, '_') String(docNumber).replace(/[^a-zA-Z0-9\-_.]/g, '_')
).normalize('NFC'); ).normalize('NFC');
const filePath = path.resolve('/data/dms/staging_ai', `${safeName}.pdf`); const filePath = path.resolve('/share/np-dms/staging_ai', `${safeName}.pdf`);
if (!filePath.startsWith('/data/dms/staging_ai/')) { if (!filePath.startsWith('/share/np-dms/staging_ai/')) {
errorItems.push({ ...item, json: { ...item.json, error: 'Path traversal detected', error_type: 'FILE_NOT_FOUND' } }); errorItems.push({ ...item, json: { ...item.json, error: 'Path traversal detected', error_type: 'FILE_NOT_FOUND' } });
continue; continue;
} }
@@ -612,7 +638,10 @@ return [autoIngest, reviewQueue, rejectLog, errorLog];
"ai_confidence": "={{ $json.ai_result.confidence }}", "ai_confidence": "={{ $json.ai_result.confidence }}",
"ai_issues": "={{ $json.ai_result.detected_issues }}", "ai_issues": "={{ $json.ai_result.detected_issues }}",
"migrated_by": "SYSTEM_IMPORT", "migrated_by": "SYSTEM_IMPORT",
"batch_id": "={{ $env.MIGRATION_BATCH_ID }}" "batch_id": "={{ $env.MIGRATION_BATCH_ID }}",
"details": {
"legacy_number": "={{ $json.legacy_document_number }}"
}
}, },
"options": { "timeout": 30000, "retry": { "count": 3, "delay": 5000 } } "options": { "timeout": 30000, "retry": { "count": 3, "delay": 5000 } }
} }
@@ -662,12 +691,12 @@ ON DUPLICATE KEY UPDATE status = 'PENDING', review_reason = '{{ $json.review_rea
--- ---
### 4.10 Node 5C: Reject Log → `/data/migration_logs/` #### 4.10 Node 5C: Reject Log → `/share/np-dms/n8n/migration_logs/`
```javascript ```javascript
const fs = require('fs'); const fs = require('fs');
const item = $input.first(); const item = $input.first();
const csvPath = '/data/dms/migration_logs/reject_log.csv'; const csvPath = '/share/np-dms/n8n/migration_logs/reject_log.csv';
const header = 'timestamp,document_number,title,reject_reason,ai_confidence,ai_issues\n'; const header = 'timestamp,document_number,title,reject_reason,ai_confidence,ai_issues\n';
const esc = (s) => `"${String(s||'').replace(/"/g,'""')}"`; const esc = (s) => `"${String(s||'').replace(/"/g,'""')}"`;
@@ -687,12 +716,12 @@ return [$input.first()];
--- ---
### 4.11 Node 5D: Error Log → `/data/migration_logs/` + MariaDB #### 4.11 Node 5D: Error Log → `/share/np-dms/n8n/migration_logs/` + MariaDB
```javascript ```javascript
const fs = require('fs'); const fs = require('fs');
const item = $input.first(); const item = $input.first();
const csvPath = '/data/dms/migration_logs/error_log.csv'; const csvPath = '/share/np-dms/n8n/migration_logs/error_log.csv';
const header = 'timestamp,document_number,error_type,error_message,raw_ai_response\n'; const header = 'timestamp,document_number,error_type,error_message,raw_ai_response\n';
const esc = (s) => `"${String(s||'').replace(/"/g,'""')}"`; const esc = (s) => `"${String(s||'').replace(/"/g,'""')}"`;

View File

@@ -1,29 +0,0 @@
-- Fix Project Permissions
-- File: specs/07-database/fix-project-permissions.sql
-- 1. Ensure project.view permission exists
INSERT IGNORE INTO permissions (
permission_id,
permission_name,
description,
module,
is_active
)
VALUES (
202,
'project.view',
'ดูรายการโครงการ',
'project',
1
);
-- 2. Grant project.view to Superadmin (Role 1)
INSERT IGNORE INTO role_permissions (role_id, permission_id)
VALUES (1, 202);
-- 3. Grant project.view to Organization Admin (Role 2)
INSERT IGNORE INTO role_permissions (role_id, permission_id)
VALUES (2, 202);
-- 4. Grant project.view to Project Manager (Role 6)
INSERT IGNORE INTO role_permissions (role_id, permission_id)
VALUES (6, 202);
-- 5. Grant project.view to Viewer (Role 5)
INSERT IGNORE INTO role_permissions (role_id, permission_id)
VALUES (5, 202);

View File

@@ -16,12 +16,16 @@
-- Major Changes: -- Major Changes:
-- 1. ปรับปรุง: -- 1. ปรับปรุง:
-- 1.1 TABLE correspondences -- 1.1 TABLE correspondences
-- - INDEX idx_doc_number (document_number), -- - INDEX idx_doc_number (correspondence_number),
-- - INDEX idx_deleted_at (deleted_at), -- - INDEX idx_deleted_at (deleted_at),
-- - INDEX idx_created_by (created_by), -- - INDEX idx_created_by (created_by),
-- 2. เพิ่ม: -- 2. เพิ่ม:
-- 2.1 TABLE migration_progress -- 2.1 TABLE migration_progress
-- 2.2 TABLE import_transactions -- 2.2 TABLE import_transactions
-- 2.3 TABLE migration_review_queue
-- 2.4 TABLE migration_errors
-- 2.5 TABLE migration_fallback_state
-- 2.6 TABLE migration_daily_summary
-- ========================================================== -- ==========================================================
SET NAMES utf8mb4; SET NAMES utf8mb4;
@@ -50,8 +54,19 @@ DROP VIEW IF EXISTS v_current_correspondences;
-- 🗑️ DROP TABLE SCRIPT: LCBP3-DMS v1.4.2 -- 🗑️ DROP TABLE SCRIPT: LCBP3-DMS v1.4.2
-- คำเตือน: ข้อมูลทั้งหมดจะหายไป กรุณา Backup ก่อนรันบน Production -- คำเตือน: ข้อมูลทั้งหมดจะหายไป กรุณา Backup ก่อนรันบน Production
SET FOREIGN_KEY_CHECKS = 0; SET FOREIGN_KEY_CHECKS = 0;
DROP TABLE IF EXISTS migration_progress; DROP TABLE IF EXISTS migration_progress;
DROP TABLE IF EXISTS import_transactions; DROP TABLE IF EXISTS import_transactions;
DROP TABLE IF EXISTS migration_review_queue;
DROP TABLE IF EXISTS migration_errors;
DROP TABLE IF EXISTS migration_fallback_state;
DROP TABLE IF EXISTS migration_daily_summary;
-- ============================================================ -- ============================================================
-- ส่วนที่ 1: ตาราง System, Logs & Preferences (ตารางปลายทาง/ส่วนเสริม) -- ส่วนที่ 1: ตาราง System, Logs & Preferences (ตารางปลายทาง/ส่วนเสริม)
-- ============================================================ -- ============================================================
@@ -472,16 +487,19 @@ CREATE TABLE correspondences (
created_at DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'วันที่สร้าง', created_at DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'วันที่สร้าง',
created_by INT COMMENT 'ผู้สร้าง', created_by INT COMMENT 'ผู้สร้าง',
deleted_at DATETIME NULL COMMENT 'สำหรับ Soft Delete', deleted_at DATETIME NULL COMMENT 'สำหรับ Soft Delete',
INDEX idx_doc_number (document_number), INDEX idx_corr_number (correspondence_number),
INDEX idx_deleted_at (deleted_at), INDEX idx_deleted_at (deleted_at),
INDEX idx_created_by (created_by), INDEX idx_created_by (created_by),
FOREIGN KEY (correspondence_type_id) REFERENCES correspondence_types (id) ON DELETE RESTRICT, FOREIGN KEY (correspondence_type_id) REFERENCES correspondence_types (id) ON DELETE RESTRICT,
FOREIGN KEY (project_id) REFERENCES projects (id) ON DELETE CASCADE, FOREIGN KEY (project_id) REFERENCES projects (id) ON DELETE CASCADE,
FOREIGN KEY (originator_id) REFERENCES organizations (id) ON DELETE SET NULL, FOREIGN KEY (originator_id) REFERENCES organizations (id) ON DELETE
FOREIGN KEY (created_by) REFERENCES users (user_id) ON DELETE SET NULL, SET NULL,
FOREIGN KEY (created_by) REFERENCES users (user_id) ON DELETE
SET NULL,
-- Foreign Key ที่รวมเข้ามาจาก ALTER (ระบุชื่อ Constraint ตามที่ต้องการ) -- Foreign Key ที่รวมเข้ามาจาก ALTER (ระบุชื่อ Constraint ตามที่ต้องการ)
CONSTRAINT fk_corr_discipline FOREIGN KEY (discipline_id) REFERENCES disciplines (id) ON DELETE SET NULL, CONSTRAINT fk_corr_discipline FOREIGN KEY (discipline_id) REFERENCES disciplines (id) ON DELETE
UNIQUE KEY uq_corr_no_per_project (project_id, correspondence_number) SET NULL,
UNIQUE KEY uq_corr_no_per_project (project_id, correspondence_number)
) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_general_ci COMMENT = 'ตาราง "แม่" ของเอกสารโต้ตอบ เก็บข้อมูลที่ไม่เปลี่ยนตาม Revision'; ) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE = utf8mb4_general_ci COMMENT = 'ตาราง "แม่" ของเอกสารโต้ตอบ เก็บข้อมูลที่ไม่เปลี่ยนตาม Revision';
-- ตารางเชื่อมผู้รับ (TO/CC) สำหรับเอกสารแต่ละฉบับ (M:N) -- ตารางเชื่อมผู้รับ (TO/CC) สำหรับเอกสารแต่ละฉบับ (M:N)
@@ -1545,11 +1563,12 @@ CREATE INDEX idx_wf_hist_user ON workflow_histories (action_by_user_id);
-- Checkpoint Table: -- Checkpoint Table:
CREATE TABLE IF NOT EXISTS migration_progress ( CREATE TABLE IF NOT EXISTS migration_progress (
batch_id VARCHAR(50) PRIMARY KEY, batch_id VARCHAR(50) PRIMARY KEY,
last_processed_index INT DEFAULT 0, last_processed_index INT DEFAULT 0,
status ENUM('RUNNING','COMPLETED','FAILED') DEFAULT 'RUNNING', STATUS ENUM('RUNNING', 'COMPLETED', 'FAILED') DEFAULT 'RUNNING',
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
); );
-- Idempotency Table : -- Idempotency Table :
CREATE TABLE IF NOT EXISTS import_transactions ( CREATE TABLE IF NOT EXISTS import_transactions (
id INT AUTO_INCREMENT PRIMARY KEY, id INT AUTO_INCREMENT PRIMARY KEY,
@@ -1560,6 +1579,7 @@ CREATE TABLE IF NOT EXISTS import_transactions (
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_idem_key (idempotency_key) INDEX idx_idem_key (idempotency_key)
); );
-- ============================================================ -- ============================================================
-- 5. PARTITIONING PREPARATION (Advance - Optional) -- 5. PARTITIONING PREPARATION (Advance - Optional)
-- ============================================================ -- ============================================================
@@ -2049,6 +2069,87 @@ CREATE INDEX idx_correspondences_type_project ON correspondences (correspondence
CREATE INDEX idx_corr_revisions_current_status ON correspondence_revisions (is_current, correspondence_status_id); CREATE INDEX idx_corr_revisions_current_status ON correspondence_revisions (is_current, correspondence_status_id);
-- =====================================================
-- Migration Tracking Tables (Temporary)
-- =====================================================
-- Checkpoint: one row per migration batch recording how far the run
-- has progressed, so an interrupted batch can resume from
-- last_processed_index instead of starting over.
CREATE TABLE IF NOT EXISTS migration_progress (
  batch_id VARCHAR(50) PRIMARY KEY,
  last_processed_index INT DEFAULT 0,
  -- lowercase `status` for consistency with the rest of the schema
  -- (a formatter had uppercased it to STATUS; identifiers are
  -- case-insensitive in MariaDB, so this is purely cosmetic)
  status ENUM('RUNNING', 'COMPLETED', 'FAILED') DEFAULT 'RUNNING',
  updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);
-- Review Queue (temporary — not part of the business schema):
-- holds documents whose AI confidence fell between the configured
-- low/high thresholds and which need a human decision before ingest.
CREATE TABLE IF NOT EXISTS migration_review_queue (
  id INT AUTO_INCREMENT PRIMARY KEY,
  document_number VARCHAR(100) NOT NULL,
  title TEXT,
  -- title as read from the legacy source, before any AI suggestion
  original_title TEXT,
  ai_suggested_category VARCHAR(50),
  -- 0.000–9.999; workflow thresholds are 0.60 / 0.85
  ai_confidence DECIMAL(4, 3),
  ai_issues JSON,
  review_reason VARCHAR(255),
  -- lowercase `status` for consistency with the rest of the schema
  -- (identifiers are case-insensitive in MariaDB; purely cosmetic)
  status ENUM('PENDING', 'APPROVED', 'REJECTED') DEFAULT 'PENDING',
  reviewed_by VARCHAR(100),
  reviewed_at TIMESTAMP NULL,
  created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
  -- one queue row per document; a re-queued document updates its
  -- existing row via the workflow's ON DUPLICATE KEY UPDATE
  UNIQUE KEY uq_doc_number (document_number)
);
-- Error Log: one row per failed document, indexed by batch for
-- per-run reporting and by error_type for triage.
CREATE TABLE IF NOT EXISTS migration_errors (
id INT AUTO_INCREMENT PRIMARY KEY,
batch_id VARCHAR(50),
document_number VARCHAR(100),
-- coarse failure classification; 'UNKNOWN' is the catch-all bucket
error_type ENUM(
'FILE_NOT_FOUND',
'AI_PARSE_ERROR',
'API_ERROR',
'DB_ERROR',
'UNKNOWN'
),
error_message TEXT,
-- raw model output kept verbatim so AI_PARSE_ERROR cases can be debugged
raw_ai_response TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_batch_id (batch_id),
INDEX idx_error_type (error_type)
);
-- Fallback State: rolling error counter per batch.
-- NOTE(review): presumably the workflow sets is_fallback_active once
-- recent_error_count exceeds FALLBACK_ERROR_THRESHOLD and then uses
-- the fallback Ollama model — confirm against the n8n workflow logic.
CREATE TABLE IF NOT EXISTS migration_fallback_state (
id INT AUTO_INCREMENT PRIMARY KEY,
batch_id VARCHAR(50) UNIQUE,
recent_error_count INT DEFAULT 0,
is_fallback_active BOOLEAN DEFAULT FALSE,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
);
-- Idempotency (Patch): records each import attempt under a unique
-- idempotency_key so re-runs of the same batch can detect documents
-- that were already ingested instead of inserting duplicates.
CREATE TABLE IF NOT EXISTS import_transactions (
id INT AUTO_INCREMENT PRIMARY KEY,
idempotency_key VARCHAR(255) UNIQUE NOT NULL,
document_number VARCHAR(100),
batch_id VARCHAR(100),
-- HTTP-style result of the import call; defaults to 201 (Created)
status_code INT DEFAULT 201,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
INDEX idx_idem_key (idempotency_key)
);
-- Daily Summary: per-batch, per-day counters of migration outcomes
-- (auto-ingested / sent to review / rejected / errored).
CREATE TABLE IF NOT EXISTS migration_daily_summary (
id INT AUTO_INCREMENT PRIMARY KEY,
batch_id VARCHAR(50),
summary_date DATE,
total_processed INT DEFAULT 0,
auto_ingested INT DEFAULT 0,
sent_to_review INT DEFAULT 0,
rejected INT DEFAULT 0,
errors INT DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
-- one summary row per batch per day; re-summarizing the same day
-- must update the existing row rather than add a second one
UNIQUE KEY uq_batch_date (batch_id, summary_date)
);
CREATE INDEX idx_corr_revisions_correspondence_current ON correspondence_revisions (correspondence_id, is_current); CREATE INDEX idx_corr_revisions_correspondence_current ON correspondence_revisions (correspondence_id, is_current);
-- Indexes for v_current_rfas performance -- Indexes for v_current_rfas performance

View File

@@ -18,7 +18,6 @@
-- 2.1 username = migration_bot -- 2.1 username = migration_bot
-- 2.2 -- 2.2
-- ========================================================== -- ==========================================================
INSERT INTO organization_roles (id, role_name) INSERT INTO organization_roles (id, role_name)
VALUES (1, 'OWNER'), VALUES (1, 'OWNER'),
(2, 'DESIGNER'), (2, 'DESIGNER'),
@@ -26,6 +25,7 @@ VALUES (1, 'OWNER'),
(4, 'CONTRACTOR'), (4, 'CONTRACTOR'),
(5, 'THIRD PARTY'), (5, 'THIRD PARTY'),
(6, 'GUEST'); (6, 'GUEST');
INSERT INTO organizations ( INSERT INTO organizations (
id, id,
organization_code, organization_code,
@@ -100,6 +100,7 @@ VALUES (1, 'กทท.', 'การท่าเรือแห่งประเ
), ),
(31, 'EN', 'Third Party Environment', 5), (31, 'EN', 'Third Party Environment', 5),
(32, 'CAR', 'Third Party Fishery Care', 5); (32, 'CAR', 'Third Party Fishery Care', 5);
-- Seed project -- Seed project
INSERT INTO projects (project_code, project_name) INSERT INTO projects (project_code, project_name)
VALUES ( VALUES (
@@ -126,6 +127,7 @@ VALUES (
'LCBP3-EN', 'LCBP3-EN',
'โครงการพัฒนาท่าเรือแหลมฉบัง ระยะที่ 3 (ส่วนที่ 4) งานก่อสร้าง' 'โครงการพัฒนาท่าเรือแหลมฉบัง ระยะที่ 3 (ส่วนที่ 4) งานก่อสร้าง'
); );
-- Seed contract -- Seed contract
-- ใช้ Subquery เพื่อดึง project_id มาเชื่อมโยง ทำให้ไม่ต้องมานั่งจัดการ ID ด้วยตัวเอง -- ใช้ Subquery เพื่อดึง project_id มาเชื่อมโยง ทำให้ไม่ต้องมานั่งจัดการ ID ด้วยตัวเอง
INSERT INTO contracts ( INSERT INTO contracts (
@@ -204,6 +206,7 @@ VALUES (
), ),
TRUE TRUE
); );
-- Seed user -- Seed user
-- Initial SUPER_ADMIN user -- Initial SUPER_ADMIN user
INSERT INTO users ( INSERT INTO users (
@@ -252,18 +255,32 @@ VALUES (
'$2b$10$MpKnf1UEvlu8hZcqMkhMsuWG3gYD/priWTUr71GpF/uuroaGxtose', '$2b$10$MpKnf1UEvlu8hZcqMkhMsuWG3gYD/priWTUr71GpF/uuroaGxtose',
'Viewer', 'Viewer',
'สคฉ.03', 'สคฉ.03',
'viewer01 @example.com', 'viewer01@example.com',
NULL, NULL,
10 10
); );
INSERT INTO users (username, email, role, is_active) INSERT INTO users (
VALUES ( user_id,
username,
password_hash,
first_name,
last_name,
email,
line_id,
primary_organization_id
)
VALUES (
5,
'migration_bot', 'migration_bot',
'$2b$10$MpKnf1UEvlu8hZcqMkhMsuWG3gYD/priWTUr71GpF/uuroaGxtose',
'Migration',
'Bot',
'migration@system.internal', 'migration@system.internal',
'SYSTEM_ADMIN', NULL,
TRUE 1
); );
-- ========================================================== -- ==========================================================
-- Seed Roles (บทบาทพื้นฐาน 5 บทบาท ตาม Req 4.3) -- Seed Roles (บทบาทพื้นฐาน 5 บทบาท ตาม Req 4.3)
-- ========================================================== -- ==========================================================
@@ -317,6 +334,7 @@ VALUES (
'Contract', 'Contract',
'ผู้ดูแลสัญญา: จัดการสมาชิกในสัญญา, สร้าง / จัดการข้อมูลหลักเฉพาะสัญญา, และอนุมัติเอกสารในสัญญา' 'ผู้ดูแลสัญญา: จัดการสมาชิกในสัญญา, สร้าง / จัดการข้อมูลหลักเฉพาะสัญญา, และอนุมัติเอกสารในสัญญา'
); );
-- ========================================================== -- ==========================================================
-- Seed Role-Permissions Mapping (จับคู่สิทธิ์เริ่มต้น) -- Seed Role-Permissions Mapping (จับคู่สิทธิ์เริ่มต้น)
-- ========================================================== -- ==========================================================
@@ -343,8 +361,11 @@ VALUES (1, 1, 1, NULL, NULL, NULL, NULL),
-- admin: Organization scope (org_id=1 = กทท.) -- admin: Organization scope (org_id=1 = กทท.)
(3, 3, 4, 41, NULL, NULL, 1), (3, 3, 4, 41, NULL, NULL, 1),
-- editor01: Editor role (role_id=4) at organization 41 (คคง.), assigned by superadmin -- editor01: Editor role (role_id=4) at organization 41 (คคง.), assigned by superadmin
(4, 4, 5, 10, NULL, NULL, 1); (4, 4, 5, 10, NULL, NULL, 1),
-- viewer01: Viewer role (role_id=5) at organization 10 (สคฉ.03), assigned by superadmin -- viewer01: Viewer role (role_id=5) at organization 10 (สคฉ.03), assigned by superadmin
(5, 5, 1, NULL, NULL, NULL, 1);
-- migration_bot: Superadmin role (role_id=1) for migration scripts, assigned by superadmin
-- ===================================================== -- =====================================================
-- == 4. การเชื่อมโยงโครงการกับองค์กร (project_organizations) == -- == 4. การเชื่อมโยงโครงการกับองค์กร (project_organizations) ==
-- ===================================================== -- =====================================================
@@ -369,6 +390,7 @@ WHERE organization_code IN (
'EN', 'EN',
'CAR' 'CAR'
); );
-- โครงการย่อย (LCBP3C1) จะมีเฉพาะองค์กรที่เกี่ยวข้อง -- โครงการย่อย (LCBP3C1) จะมีเฉพาะองค์กรที่เกี่ยวข้อง
INSERT INTO project_organizations (project_id, organization_id) INSERT INTO project_organizations (project_id, organization_id)
SELECT ( SELECT (
@@ -385,6 +407,7 @@ WHERE organization_code IN (
'คคง.', 'คคง.',
'ผรม.1 ' 'ผรม.1 '
); );
-- ทำเช่นเดียวกันสำหรับโครงการอื่นๆ (ตัวอย่าง) -- ทำเช่นเดียวกันสำหรับโครงการอื่นๆ (ตัวอย่าง)
INSERT INTO project_organizations (project_id, organization_id) INSERT INTO project_organizations (project_id, organization_id)
SELECT ( SELECT (
@@ -401,6 +424,7 @@ WHERE organization_code IN (
'คคง.', 'คคง.',
'ผรม.2' 'ผรม.2'
); );
-- ===================================================== -- =====================================================
-- == 5. การเชื่อมโยงสัญญากับองค์กร (contract_organizations) == -- == 5. การเชื่อมโยงสัญญากับองค์กร (contract_organizations) ==
-- ===================================================== -- =====================================================
@@ -432,6 +456,7 @@ VALUES (
), ),
'Designer' 'Designer'
); );
-- สัญญาที่ปรึกษาควบคุมงาน (PSLCBP3) -- สัญญาที่ปรึกษาควบคุมงาน (PSLCBP3)
INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract) INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract)
VALUES ( VALUES (
@@ -460,6 +485,7 @@ VALUES (
), ),
'Consultant' 'Consultant'
); );
-- สัญญางานก่อสร้าง ส่วนที่ 1 (LCBP3-C1) -- สัญญางานก่อสร้าง ส่วนที่ 1 (LCBP3-C1)
INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract) INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract)
VALUES ( VALUES (
@@ -488,6 +514,7 @@ VALUES (
), ),
'Contractor' 'Contractor'
); );
-- สัญญางานก่อสร้าง ส่วนที่ 2 (LCBP3-C2) -- สัญญางานก่อสร้าง ส่วนที่ 2 (LCBP3-C2)
INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract) INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract)
VALUES ( VALUES (
@@ -516,6 +543,7 @@ VALUES (
), ),
'Contractor' 'Contractor'
); );
-- สัญญาตรวจสอบสิ่งแวดล้อม (LCBP3-EN) -- สัญญาตรวจสอบสิ่งแวดล้อม (LCBP3-EN)
INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract) INSERT INTO contract_organizations (contract_id, organization_id, role_in_contract)
VALUES ( VALUES (
@@ -544,6 +572,7 @@ VALUES (
), ),
'Consultant' 'Consultant'
); );
-- Seed correspondence_status -- Seed correspondence_status
INSERT INTO correspondence_status ( INSERT INTO correspondence_status (
status_code, status_code,
@@ -574,6 +603,7 @@ VALUES ('DRAFT', 'Draft', 10, 1),
('CCBDSN', 'Canceled by Designer', 92, 1), ('CCBDSN', 'Canceled by Designer', 92, 1),
('CCBCSC', 'Canceled by CSC', 93, 1), ('CCBCSC', 'Canceled by CSC', 93, 1),
('CCBCON', 'Canceled by Contractor', 94, 1); ('CCBCON', 'Canceled by Contractor', 94, 1);
-- Seed correspondence_types -- Seed correspondence_types
INSERT INTO correspondence_types (type_code, type_name, sort_order, is_active) INSERT INTO correspondence_types (type_code, type_name, sort_order, is_active)
VALUES ('RFA', 'Request for Approval', 1, 1), VALUES ('RFA', 'Request for Approval', 1, 1),
@@ -586,6 +616,7 @@ VALUES ('RFA', 'Request for Approval', 1, 1),
('MOM', 'Minutes of Meeting', 8, 1), ('MOM', 'Minutes of Meeting', 8, 1),
('NOTICE', 'Notice', 9, 1), ('NOTICE', 'Notice', 9, 1),
('OTHER', 'Other', 10, 1); ('OTHER', 'Other', 10, 1);
-- Seed rfa_types -- Seed rfa_types
INSERT INTO rfa_types ( INSERT INTO rfa_types (
contract_id, contract_id,
@@ -1075,6 +1106,7 @@ SELECT id,
'รายงานการฝึกปฏิบัติ' 'รายงานการฝึกปฏิบัติ'
FROM contracts FROM contracts
WHERE contract_code = 'LCBP3-C2'; WHERE contract_code = 'LCBP3-C2';
-- Seed rfa_status_codes -- Seed rfa_status_codes
INSERT INTO rfa_status_codes ( INSERT INTO rfa_status_codes (
status_code, status_code,
@@ -1089,6 +1121,7 @@ VALUES ('DFT', 'Draft', 'ฉบับร่าง', 1),
('ASB', 'AS - Built', 'แบบก่อสร้างจริง', 30), ('ASB', 'AS - Built', 'แบบก่อสร้างจริง', 30),
('OBS', 'Obsolete', 'ไม่ใช้งาน', 80), ('OBS', 'Obsolete', 'ไม่ใช้งาน', 80),
('CC', 'Canceled', 'ยกเลิก', 99); ('CC', 'Canceled', 'ยกเลิก', 99);
INSERT INTO rfa_approve_codes ( INSERT INTO rfa_approve_codes (
approve_code, approve_code,
approve_name, approve_name,
@@ -1103,12 +1136,14 @@ VALUES ('1A', 'Approved by Authority', 10, 1),
('3R', 'Revise and Resubmit', 32, 1), ('3R', 'Revise and Resubmit', 32, 1),
('4X', 'Reject', 40, 1), ('4X', 'Reject', 40, 1),
('5N', 'No Further Action', 50, 1); ('5N', 'No Further Action', 50, 1);
-- Seed circulation_status_codes -- Seed circulation_status_codes
INSERT INTO circulation_status_codes (code, description, sort_order) INSERT INTO circulation_status_codes (code, description, sort_order)
VALUES ('OPEN', 'Open', 1), VALUES ('OPEN', 'Open', 1),
('IN_REVIEW', 'In Review', 2), ('IN_REVIEW', 'In Review', 2),
('COMPLETED', 'ปCompleted', 3), ('COMPLETED', 'ปCompleted', 3),
('CANCELLED', 'Cancelled / Withdrawn', 9); ('CANCELLED', 'Cancelled / Withdrawn', 9);
-- ตาราง "แม่" ของ RFA (มีความสัมพันธ์ 1:N กับ rfa_revisions) -- ตาราง "แม่" ของ RFA (มีความสัมพันธ์ 1:N กับ rfa_revisions)
-- ========================================================== -- ==========================================================
-- SEED DATA 6B.md (Disciplines, RFA Types, Sub Types) -- SEED DATA 6B.md (Disciplines, RFA Types, Sub Types)
@@ -1372,6 +1407,7 @@ SELECT id,
'Other' 'Other'
FROM contracts FROM contracts
WHERE contract_code = 'LCBP3-C1'; WHERE contract_code = 'LCBP3-C1';
-- LCBP3-C2 -- LCBP3-C2
INSERT INTO disciplines ( INSERT INTO disciplines (
contract_id, contract_id,
@@ -1616,6 +1652,7 @@ SELECT id,
'Others' 'Others'
FROM contracts FROM contracts
WHERE contract_code = 'LCBP3-C2'; WHERE contract_code = 'LCBP3-C2';
-- 2. Seed ข้อมูล Correspondence Sub Types (Mapping RFA Types กับ Number) -- 2. Seed ข้อมูล Correspondence Sub Types (Mapping RFA Types กับ Number)
-- เนื่องจาก sub_type_code ตรงกับ RFA Type Code แต่ Req ต้องการ Mapping เป็น Number -- เนื่องจาก sub_type_code ตรงกับ RFA Type Code แต่ Req ต้องการ Mapping เป็น Number
-- LCBP3-C1 -- LCBP3-C1
@@ -1666,6 +1703,7 @@ FROM contracts c,
correspondence_types ct correspondence_types ct
WHERE c.contract_code = 'LCBP3-C1' WHERE c.contract_code = 'LCBP3-C1'
AND ct.type_code = 'RFA'; AND ct.type_code = 'RFA';
-- LCBP3-C2 -- LCBP3-C2
INSERT INTO correspondence_sub_types ( INSERT INTO correspondence_sub_types (
contract_id, contract_id,
@@ -1713,6 +1751,7 @@ FROM contracts c,
correspondence_types ct correspondence_types ct
WHERE c.contract_code = 'LCBP3-C2' WHERE c.contract_code = 'LCBP3-C2'
AND ct.type_code = 'RFA'; AND ct.type_code = 'RFA';
-- LCBP3-C3 -- LCBP3-C3
INSERT INTO correspondence_sub_types ( INSERT INTO correspondence_sub_types (
contract_id, contract_id,
@@ -1760,6 +1799,7 @@ FROM contracts c,
correspondence_types ct correspondence_types ct
WHERE c.contract_code = 'LCBP3-C3' WHERE c.contract_code = 'LCBP3-C3'
AND ct.type_code = 'RFA'; AND ct.type_code = 'RFA';
-- LCBP3-C4 -- LCBP3-C4
INSERT INTO correspondence_sub_types ( INSERT INTO correspondence_sub_types (
contract_id, contract_id,
@@ -1807,6 +1847,7 @@ FROM contracts c,
correspondence_types ct correspondence_types ct
WHERE c.contract_code = 'LCBP3-C4' WHERE c.contract_code = 'LCBP3-C4'
AND ct.type_code = 'RFA'; AND ct.type_code = 'RFA';
INSERT INTO `correspondences` ( INSERT INTO `correspondences` (
`id`, `id`,
`correspondence_number`, `correspondence_number`,
@@ -1843,6 +1884,7 @@ VALUES (
1, 1,
NULL NULL
); );
INSERT INTO `correspondence_revisions` ( INSERT INTO `correspondence_revisions` (
`id`, `id`,
`correspondence_id`, `correspondence_id`,
@@ -1881,6 +1923,7 @@ VALUES (
1, 1,
NULL NULL
); );
INSERT INTO `rfas` ( INSERT INTO `rfas` (
`id`, `id`,
`rfa_type_id`, `rfa_type_id`,
@@ -1889,6 +1932,7 @@ INSERT INTO `rfas` (
`deleted_at` `deleted_at`
) )
VALUES (2, 68, '2025-12-06 05:40:02', 1, NULL); VALUES (2, 68, '2025-12-06 05:40:02', 1, NULL);
INSERT INTO `rfa_revisions` ( INSERT INTO `rfa_revisions` (
`id`, `id`,
`rfa_id`, `rfa_id`,
@@ -1929,6 +1973,7 @@ VALUES (
NULL, NULL,
NULL NULL
); );
-- ========================================================== -- ==========================================================
-- 20. Workflow Definitions (Unified Workflow Engine) -- 20. Workflow Definitions (Unified Workflow Engine)
-- ========================================================== -- ==========================================================
@@ -2165,6 +2210,7 @@ VALUES (
NOW(), NOW(),
NOW() NOW()
); );
INSERT INTO `document_number_formats` ( INSERT INTO `document_number_formats` (
`id`, `id`,
`project_id`, `project_id`,

View File

@@ -1,15 +1,14 @@
-- ========================================================== -- ==========================================================
-- DMS DMS v0.5.0 -- DMS v1.8.0 Document Management System Database
-- Database v5.1 - Seed contract_dwg data -- Seed Contract Drawing data
-- Server: Container Station on QNAPQNAP TS-473A -- Server: Container Station on QNAP TS-473A
-- Database service: MariaDB 10.11 -- Database service: MariaDB 11.8
-- database ui: phpmyadmin 5-apache -- database web ui: phpmyadmin 5-apache
-- backend sevice: node.js -- database development ui: DBeaver
-- frontend sevice: next.js -- backend service: NestJS
-- reverse proxy: nginx 1.27-alpine -- frontend service: next.js
-- reverse proxy: jc21/nginx-proxy-manager:latest
-- cron service: n8n -- cron service: n8n
-- scripts: alpine:3.20
-- Notes:
-- ========================================================== -- ==========================================================
INSERT INTO contract_drawing_volumes (project_id, volume_code, volume_name, description) INSERT INTO contract_drawing_volumes (project_id, volume_code, volume_name, description)

View File

@@ -1,8 +1,9 @@
-- ========================================================== -- ==========================================================
-- DMS v1.6.0 - Permissions Seed Data (REORGANIZED) -- DMS v1.8.0 - Permissions Seed Data (REORGANIZED)
-- File: specs/07-database/permissions-seed-data.sql -- File: specs/07-database/lcbp3-v1.8.0-seed-permissions.sql
-- Total Permissions: 85 (Reorganized with systematic ID allocation) -- Total Permissions: 85 (Reorganized with systematic ID allocation)
-- Created: 2025-12-13 -- Created: 2025-12-13
-- Updated: 2026-02-28 (v1.8.0 merge)
-- ========================================================== -- ==========================================================
-- Clear existing data -- Clear existing data
TRUNCATE TABLE role_permissions; TRUNCATE TABLE role_permissions;
@@ -1065,3 +1066,37 @@ VALUES -- Contract Management
-- ========================================================== -- ==========================================================
-- VERIFICATION: Run permissions-verification.sql after this -- VERIFICATION: Run permissions-verification.sql after this
-- ========================================================== -- ==========================================================
-- ==========================================================
-- MERGED FROM fix-project-permissions.sql (v1.8.0 Update)
-- ==========================================================
-- Fix Project Permissions
-- File: specs/07-database/fix-project-permissions.sql
-- 1. Ensure the project.view permission exists (id 202).
--    INSERT IGNORE keeps the script re-runnable on an already
--    seeded database.
INSERT IGNORE INTO permissions (
    permission_id,
    permission_name,
    description,
    module,
    is_active
  )
VALUES (
    202,
    'project.view',
    'ดูรายการโครงการ',
    'project',
    1
  );
-- 2. Grant project.view to the default roles:
--    1 = Superadmin, 2 = Organization Admin, 6 = Project Manager,
--    5 = Viewer.
--    A single multi-row INSERT IGNORE replaces the four separate
--    statements; the end state is identical and existing grants are
--    still skipped.
INSERT IGNORE INTO role_permissions (role_id, permission_id)
VALUES (1, 202),
  (2, 202),
  (6, 202),
  (5, 202);

View File

@@ -1,3 +1,15 @@
-- ==========================================================
-- DMS v1.8.0 Document Management System Database
-- Seed Shop Drawing data
-- Server: Container Station on QNAP TS-473A
-- Database service: MariaDB 11.8
-- database web ui: phpmyadmin 5-apache
-- database development ui: DBeaver
-- backend service: NestJS
-- frontend service: next.js
-- reverse proxy: jc21/nginx-proxy-manager:latest
-- cron service: n8n
-- ==========================================================
INSERT INTO shop_drawing_sub_categories( INSERT INTO shop_drawing_sub_categories(
project_id, project_id,
sub_category_code, sub_category_code,

View File

@@ -0,0 +1,216 @@
{
  "meta": {
    "instanceId": "lcbp3-migration"
  },
  "nodes": [
    {
      "parameters": {},
      "id": "trigger-1",
      "name": "When clicking Execute Workflow",
      "type": "n8n-nodes-base.manualTrigger",
      "typeVersion": 1,
      "position": [0, 0]
    },
    {
      "parameters": {
        "operation": "read",
        "fileFormat": "xlsx",
        "options": {}
      },
      "id": "spreadsheet-1",
      "name": "Read Excel Data",
      "type": "n8n-nodes-base.spreadsheetFile",
      "typeVersion": 2,
      "position": [200, 0]
    },
    {
      "parameters": {
        "batchSize": 10,
        "options": {}
      },
      "id": "split-in-batches-1",
      "name": "Split In Batches",
      "type": "n8n-nodes-base.splitInBatches",
      "typeVersion": 3,
      "position": [400, 0]
    },
    {
      "parameters": {
        "jsCode": "const items = $input.all();\n\nreturn items.map((item) => {\n  const prompt = `You are a Document Controller for a large construction project.\nYour task is to validate document metadata.\nYou MUST respond ONLY with valid JSON. No explanation, no markdown, no extra text.\n\nDocument Number: ${item.json.document_number}\nTitle: ${item.json.title}\nCategory List: [\"Correspondence\",\"RFA\",\"Drawing\",\"Transmittal\",\"Report\",\"Other\"]\n\nRespond ONLY with this exact JSON structure:\n{\n  \"is_valid\": true,\n  \"confidence\": 0.95,\n  \"suggested_category\": \"Correspondence\",\n  \"detected_issues\": [],\n  \"suggested_title\": null\n}`;\n\n  return {\n    json: {\n      ...item.json,\n      ollama_payload: {\n        model: \"llama3.2:3b\",\n        format: \"json\",\n        stream: false,\n        prompt: prompt\n      }\n    }\n  };\n});"
      },
      "id": "code-1",
      "name": "Build Prompt",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [620, 0]
    },
    {
      "parameters": {
        "method": "POST",
        "url": "http://192.168.20.100:11434/api/generate",
        "sendBody": true,
        "specifyBody": "json",
        "jsonBody": "={{ $json.ollama_payload }}",
        "options": {
          "timeout": 30000
        }
      },
      "id": "http-1",
      "name": "Ollama Local API",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.1,
      "position": [840, 0]
    },
    {
      "parameters": {
        "jsCode": "const items = $input.all();\nconst parsed = [];\n\nfor (const item of items) {\n  try {\n    let raw = item.json.response || '';\n    raw = raw.replace(/```json/gi, '').replace(/```/g, '').trim();\n    const aiResult = JSON.parse(raw);\n    parsed.push({ json: { ...item.json, ai_result: aiResult } });\n  } catch (err) {\n    parsed.push({ json: { ...item.json, ai_result: { confidence: 0, is_valid: false, error: err.message } } });\n  }\n}\nreturn parsed;"
      },
      "id": "code-2",
      "name": "Parse JSON",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [1040, 0]
    },
    {
      "parameters": {
        "conditions": {
          "boolean": [
            {
              "value1": "={{ $json.ai_result.confidence >= 0.85 && $json.ai_result.is_valid }}",
              "value2": true
            }
          ]
        }
      },
      "id": "if-1",
      "name": "Confidence >= 0.85?",
      "type": "n8n-nodes-base.if",
      "typeVersion": 1,
      "position": [1240, 0]
    },
    {
      "parameters": {
        "method": "POST",
        "url": "http://<YOUR_BACKEND_IP>:3000/api/migration/import",
        "sendHeaders": true,
        "headerParameters": {
          "parameters": [
            {
              "name": "Idempotency-Key",
              "value": "={{ $json.document_number }}:BATCH-001"
            },
            {
              "name": "Authorization",
              "value": "Bearer <YOUR_MIGRATION_TOKEN>"
            }
          ]
        },
        "sendBody": true,
        "specifyBody": "json",
        "jsonBody": "={\n  \"source_file_path\": {{ JSON.stringify('/share/np-dms/staging_ai/' + $json.document_number + '.pdf') }},\n  \"document_number\": {{ JSON.stringify($json.document_number) }},\n  \"title\": {{ JSON.stringify($json.ai_result.suggested_title || $json.title) }},\n  \"category\": {{ JSON.stringify($json.ai_result.suggested_category) }},\n  \"revision\": 1,\n  \"batch_id\": \"BATCH_001\",\n  \"ai_confidence\": {{ $json.ai_result.confidence }},\n  \"ai_issues\": {{ JSON.stringify($json.ai_result.detected_issues || []) }},\n  \"legacy_document_number\": {{ JSON.stringify($json.legacy_number) }}\n}",
        "options": {}
      },
      "id": "http-2",
      "name": "LCBP3 Backend (Auto Ingest)",
      "type": "n8n-nodes-base.httpRequest",
      "typeVersion": 4.1,
      "position": [1460, -100]
    },
    {
      "parameters": {
        "jsCode": "return [{ json: { message: \"Sent to Human Review Queue OR Check AI Error Log\", data: $input.first().json } }];"
      },
      "id": "code-3",
      "name": "Review Queue / Reject Log",
      "type": "n8n-nodes-base.code",
      "typeVersion": 2,
      "position": [1460, 100]
    }
  ],
  "connections": {
    "When clicking Execute Workflow": {
      "main": [
        [
          {
            "node": "Read Excel Data",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Read Excel Data": {
      "main": [
        [
          {
            "node": "Split In Batches",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Split In Batches": {
      "main": [
        [],
        [
          {
            "node": "Build Prompt",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Build Prompt": {
      "main": [
        [
          {
            "node": "Ollama Local API",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Ollama Local API": {
      "main": [
        [
          {
            "node": "Parse JSON",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Parse JSON": {
      "main": [
        [
          {
            "node": "Confidence >= 0.85?",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Confidence >= 0.85?": {
      "main": [
        [
          {
            "node": "LCBP3 Backend (Auto Ingest)",
            "type": "main",
            "index": 0
          }
        ],
        [
          {
            "node": "Review Queue / Reject Log",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "LCBP3 Backend (Auto Ingest)": {
      "main": [
        [
          {
            "node": "Split In Batches",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "Review Queue / Reject Log": {
      "main": [
        [
          {
            "node": "Split In Batches",
            "type": "main",
            "index": 0
          }
        ]
      ]
    }
  }
}

View File

@@ -1,6 +1,6 @@
-- ========================================================== -- ==========================================================
-- Permission System Verification Queries -- Permission System Verification Queries (v1.8.0)
-- File: specs/07-database/permissions-verification.sql -- File: specs/03-Data-and-Storage/permissions-verification.sql
-- Purpose: Verify permissions setup after seed data deployment -- Purpose: Verify permissions setup after seed data deployment
-- ========================================================== -- ==========================================================
-- ========================================================== -- ==========================================================
@@ -271,6 +271,8 @@ FROM (
SELECT 'drawing.view' SELECT 'drawing.view'
UNION UNION
SELECT 'workflow.action_review' SELECT 'workflow.action_review'
UNION
SELECT 'project.view'
) required_perms ) required_perms
LEFT JOIN permissions p USING (permission_name) LEFT JOIN permissions p USING (permission_name)
ORDER BY permission_name; ORDER BY permission_name;

View File

@@ -10,12 +10,33 @@ x-logging: &default_logging
max-size: "10m" max-size: "10m"
max-file: "5" max-file: "5"
services: services:
n8n-db:
  <<: [*restart_policy, *default_logging]
  image: postgres:16-alpine
  container_name: n8n-db
  environment:
    - POSTGRES_USER=n8n
    # '$$' yields a literal '$' — a bare trailing '$' is treated by docker
    # compose as the start of a variable interpolation and fails/warns.
    # TODO(review): move this credential to an env_file or compose secret
    # instead of committing it in plain text.
    - POSTGRES_PASSWORD=Np721220$$
    - POSTGRES_DB=n8n
  volumes:
    - "/share/np-dms/n8n/postgres-data:/var/lib/postgresql/data"
  networks:
    lcbp3: {}
  healthcheck:
    # pg_isready exits 0 only once the server accepts connections
    test: ['CMD-SHELL', 'pg_isready -h localhost -U n8n -d n8n']
    interval: 10s
    timeout: 5s
    retries: 5
n8n: n8n:
<<: [*restart_policy, *default_logging] <<: [*restart_policy, *default_logging]
image: n8nio/n8n:1.78.0 image: n8nio/n8n:latest
container_name: n8n container_name: n8n
stdin_open: true stdin_open: true
tty: true tty: true
depends_on:
n8n-db:
condition: service_healthy
deploy: deploy:
resources: resources:
limits: limits:
@@ -38,14 +59,19 @@ services:
N8N_DIAGNOSTICS_ENABLED: 'false' N8N_DIAGNOSTICS_ENABLED: 'false'
N8N_SECURE_COOKIE: 'true' N8N_SECURE_COOKIE: 'true'
N8N_ENCRYPTION_KEY: "9AAIB7Da9DW1qAhJE5/Bz4SnbQjeAngI" N8N_ENCRYPTION_KEY: "9AAIB7Da9DW1qAhJE5/Bz4SnbQjeAngI"
N8N_BASIC_AUTH_ACTIVE: 'true'
N8N_BASIC_AUTH_USER: admin
N8N_BASIC_AUTH_PASSWORD: Center#2025
N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS: 'true' N8N_ENFORCE_SETTINGS_FILE_PERMISSIONS: 'true'
GENERIC_TIMEZONE: "Asia/Bangkok" GENERIC_TIMEZONE: "Asia/Bangkok"
# DB: MySQL/MariaDB removed in n8n v1.x — now using SQLite (default) # DB Setup
# Data is persisted in /home/node/.n8n (mounted volume below) DB_TYPE: postgresdb
DB_TYPE: sqlite DB_POSTGRESDB_DATABASE: n8n
DB_POSTGRESDB_HOST: n8n-db
DB_POSTGRESDB_PORT: 5432
DB_POSTGRESDB_USER: n8n
DB_POSTGRESDB_PASSWORD: Np721220$
# Data Prune
EXECUTIONS_DATA_PRUNE: 'true'
EXECUTIONS_DATA_MAX_AGE: 168
EXECUTIONS_DATA_PRUNE_TIMEOUT: 60
ports: ports:
- "5678:5678" - "5678:5678"
@@ -57,6 +83,10 @@ services:
- "/share/np-dms/n8n/scripts:/scripts" - "/share/np-dms/n8n/scripts:/scripts"
- "/share/np-dms/n8n/data:/data" - "/share/np-dms/n8n/data:/data"
- "/var/run/docker.sock:/var/run/docker.sock" - "/var/run/docker.sock:/var/run/docker.sock"
# read-only: อ่านไฟล์ PDF ต้นฉบับเท่านั้น
- "/share/np-dms-as/Legacy:/share/np-dms/staging_ai:ro"
# read-write: เขียน Log และ CSV ทั้งหมด
- "/share/np-dms/n8n/migration_logs:/share/np-dms/n8n/migration_logs:rw"
healthcheck: healthcheck:
test: ["CMD-SHELL", "wget -qO- http://127.0.0.1:5678/healthz || exit 1"] test: ["CMD-SHELL", "wget -qO- http://127.0.0.1:5678/healthz || exit 1"]
@@ -70,6 +100,8 @@ networks:
external: true external: true
# สำหรับ n8n volumes # สำหรับ n8n volumes
# chown -R 1000:1000 /share/Container/n8n # chown -R 1000:1000 /share/np-dms/n8n
# chmod -R 755 /share/Container/n8n # chmod -R 755 /share/np-dms/n8n
# chown -R 999:999 /share/np-dms/n8n/postgres-data
# chmod -R 700 /share/np-dms/n8n/postgres-data

View File

@@ -9,9 +9,7 @@
- [n8n Migration Setup Guide](../03-Data-and-Storage/03-05-n8n-migration-setup-guide.md) - [n8n Migration Setup Guide](../03-Data-and-Storage/03-05-n8n-migration-setup-guide.md)
- [Software Architecture](../02-Architecture/02-02-software-architecture.md) - [Software Architecture](../02-Architecture/02-02-software-architecture.md)
- [Data Dictionary](../03-Data-and-Storage/03-01-data-dictionary.md) - [Data Dictionary](../03-Data-and-Storage/03-01-data-dictionary.md)
> **Note:** ADR-017 is clarified and hardened by ADR-018 regarding AI physical isolation. Category Enum system-driven, Idempotency Contract, Duplicate Handling Clarification, Storage Enforcement, Audit Log Enhancement, Review Queue Integration, Revision Drift Protection, Execution Time, Encoding Normalization, Security Hardening, Orchestrator on QNAP, AI Physical Isolation (Desktop Desk-5439).
> **Note:** ADR-017 is clarified and hardened by ADR-018 regarding AI physical isolation. Category Enum system-driven, Idempotency Contract, Duplicate Handling Clarification, Storage Enforcement, Audit Log Enhancement, Review Queue Integration, Revision Drift Protection, Execution Time, Encoding Normalization, Security Hardening, AI Physical Isolation (ASUSTOR).
--- ---
## Context and Problem Statement ## Context and Problem Statement
@@ -84,18 +82,18 @@
## Implementation Summary ## Implementation Summary
| Component | รายละเอียด | | Component | รายละเอียด |
| ---------------------- | ------------------------------------------------------------- | | ---------------------- | ------------------------------------------------------------------------------- |
| Migration Orchestrator | n8n (Docker บน ASUSTOR NAS) | | Migration Orchestrator | n8n (Docker บน QNAP NAS) |
| AI Model Primary | Ollama `llama3.2:3b` | | AI Model Primary | Ollama `llama3.2:3b` |
| AI Model Fallback | Ollama `mistral:7b-instruct-q4_K_M` | | AI Model Fallback | Ollama `mistral:7b-instruct-q4_K_M` |
| Hardware | ASUSTOR NAS (AI Processing Only) | | Hardware | QNAP NAS (Orchestrator) + Desktop Desk-5439 (AI Processing, RTX 2060 SUPER 8GB) |
| Data Ingestion | RESTful API + Migration Token (7 วัน) + Idempotency-Key Header | | Data Ingestion | RESTful API + Migration Token (7 วัน) + Idempotency-Key Header |
| Concurrency | Sequential — 1 Request/ครั้ง, Delay 2 วินาที | | Concurrency | Sequential — 1 Request/ครั้ง, Delay 2 วินาที |
| Checkpoint | MariaDB `migration_progress` | | Checkpoint | MariaDB `migration_progress` |
| Fallback | Auto-switch Model เมื่อ Error ≥ Threshold | | Fallback | Auto-switch Model เมื่อ Error ≥ Threshold |
| Storage | Backend StorageService เท่านั้น — ห้าม move file โดยตรง | | Storage | Backend StorageService เท่านั้น — ห้าม move file โดยตรง |
| Expected Runtime | ~16.6 ชั่วโมง (~3–4 คืน) สำหรับ 20,000 records | | Expected Runtime | ~16.6 ชั่วโมง (~3–4 คืน) สำหรับ 20,000 records |
--- ---