fix(prisma): many small fixes due to prisma updated version
This commit is contained in:
parent
ca1123e636
commit
047871fe35
|
|
@ -3,7 +3,6 @@ import * as nodeCrypto from 'crypto';
|
|||
if (!(globalThis as any).crypto) {
|
||||
(globalThis as any).crypto = nodeCrypto;
|
||||
}
|
||||
import { ensureAttachmentsTmpDir } from './time-and-attendance/attachments/config/attachment.fs';
|
||||
import { NestFactory, Reflector } from '@nestjs/core';
|
||||
import { AppModule } from './app.module';
|
||||
import { ModulesGuard } from './common/guards/modules.guard';
|
||||
|
|
@ -50,7 +49,6 @@ async function bootstrap() {
|
|||
credentials: true,
|
||||
});
|
||||
|
||||
await ensureAttachmentsTmpDir();
|
||||
await app.listen(process.env.PORT ?? 3000);
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,46 +0,0 @@
|
|||
import { Controller, UseInterceptors, Post, Get, Param, Res, UploadedFile, Body, Delete, Query } from "@nestjs/common";
|
||||
import { FileInterceptor } from "@nestjs/platform-express";
|
||||
|
||||
import { memoryStorage } from 'multer';
|
||||
import { Response } from 'express';
|
||||
import { AdminSearchDto } from "src/time-and-attendance/attachments/dtos/search-filters.dto";
|
||||
import { UploadMetaAttachmentsDto } from "src/time-and-attendance/attachments/dtos/upload-meta-attachments.dto";
|
||||
import { maxUploadBytes } from "src/time-and-attendance/attachments/upload.config";
|
||||
import { AttachmentDeleteService } from "src/time-and-attendance/attachments/services/attachment-delete.service";
|
||||
import { AttachmentGetService } from "src/time-and-attendance/attachments/services/attachment-get.service";
|
||||
import { AttachmentUploadService } from "src/time-and-attendance/attachments/services/attachment-upload.service";
|
||||
|
||||
@Controller('attachments')
|
||||
export class AttachmentsController {
|
||||
constructor(
|
||||
private readonly uploadService: AttachmentUploadService,
|
||||
private readonly deleteService: AttachmentDeleteService,
|
||||
private readonly getService: AttachmentGetService,
|
||||
) { }
|
||||
|
||||
@Get(':id')
|
||||
async getById(@Param('id') id: string, @Query('variant') variant: string | undefined, @Res() res: Response) {
|
||||
return await this.getService.findAttachmentById(id, variant, res)
|
||||
}
|
||||
|
||||
@Get('variants/:id')
|
||||
async getlistVariantsById(@Param('id') id: string) {
|
||||
return await this.getService.getListVariants(id);
|
||||
}
|
||||
|
||||
@Delete(':id')
|
||||
async remove(@Param('id') id: string) {
|
||||
return await this.deleteService.deleteAttachment(id);
|
||||
}
|
||||
|
||||
@Post()
|
||||
@UseInterceptors(FileInterceptor('file', { storage: memoryStorage(), limits: { fileSize: maxUploadBytes() } }))
|
||||
async upload(@UploadedFile() file?: Express.Multer.File, @Body() meta?: UploadMetaAttachmentsDto) {
|
||||
return await this.uploadService.uploadAttachment(file, meta);
|
||||
}
|
||||
|
||||
@Get('search/filters')
|
||||
async searchWithFilters(@Query() dto: AdminSearchDto) {
|
||||
return await this.getService.searchAttachmentWithFilters(dto);
|
||||
}
|
||||
}
|
||||
|
|
@ -1,27 +0,0 @@
|
|||
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AttachmentsController } from "src/time-and-attendance/attachments/attachments.controller";
|
||||
import { AttachmentArchivalService } from "src/time-and-attendance/attachments/services/attachment-archival.service";
|
||||
import { AttachmentDeleteService } from "src/time-and-attendance/attachments/services/attachment-delete.service";
|
||||
import { AttachmentGetService } from "src/time-and-attendance/attachments/services/attachment-get.service";
|
||||
import { AttachmentUploadService } from "src/time-and-attendance/attachments/services/attachment-upload.service";
|
||||
import { DiskStorageService } from "src/time-and-attendance/attachments/services/disk-storage.service";
|
||||
import { GarbargeCollectorService } from "src/time-and-attendance/attachments/services/garbage-collector.service";
|
||||
import { VariantsQueue } from "src/time-and-attendance/attachments/services/variants.queue";
|
||||
@Module({
|
||||
controllers: [ AttachmentsController],
|
||||
providers: [
|
||||
AttachmentArchivalService,
|
||||
GarbargeCollectorService,
|
||||
DiskStorageService,
|
||||
VariantsQueue,
|
||||
AttachmentDeleteService,
|
||||
AttachmentUploadService,
|
||||
AttachmentGetService,
|
||||
],
|
||||
exports: [
|
||||
AttachmentArchivalService,
|
||||
GarbargeCollectorService
|
||||
],
|
||||
})
|
||||
export class ArchivalAttachmentModule {}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
import { join } from "node:path";

import { resolveAttachmentsRoot } from "./config/attachment.config";
|
||||
|
||||
export function casPathFor(hash: string) {
|
||||
const a = hash.slice(0, 2), b = hash.slice(2, 4);
|
||||
return `sha256/${a}/${b}/${hash}`;
|
||||
}
|
||||
|
||||
//chemin absolue du storage
|
||||
export function getAbsolutePath(storagePathRel: string) {
|
||||
return join(this.root, storagePathRel);
|
||||
}
|
||||
|
||||
|
||||
export function startOfYear(): Date {
|
||||
const now = new Date();
|
||||
return new Date(Date.UTC(now.getUTCFullYear(), 0, 1, 0, 0, 0, 0));
|
||||
}
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
import { join } from "path";
|
||||
|
||||
export function resolveAttachmentsRoot() {
|
||||
const explicit = process.env.ATTACHMENTS_ROOT?.trim();
|
||||
if (explicit) return explicit; //direct filepath if possible
|
||||
|
||||
const id = (process.env.ATTACHMENTS_SERVER_ID ?? 'server').trim();
|
||||
return process.platform === 'win32' ? `\\\\${id}\\attachments` : `/mnt/attachments`; //check if server is using windows or linux
|
||||
}
|
||||
export const ATT_TMP_DIR = () => join(resolveAttachmentsRoot(), '_tmp');
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
import { promises as fs } from "node:fs";
|
||||
import { ATT_TMP_DIR } from "./attachment.config";
|
||||
|
||||
export async function ensureAttachmentsTmpDir() {
|
||||
const tmp = ATT_TMP_DIR(); //<ROOT>/_tmp
|
||||
await fs.mkdir(tmp, { recursive: true }); // create if missing
|
||||
|
||||
|
||||
return tmp;
|
||||
}
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import { Provider } from "@nestjs/common";
|
||||
import { resolveAttachmentsRoot } from "./attachment.config";
|
||||
export const ATTACHMENTS_ROOT = Symbol('ATTACHMENTS_ROOT');
|
||||
|
||||
export const attachmentsRootProvider: Provider = {
|
||||
provide: ATTACHMENTS_ROOT,
|
||||
useFactory: () => resolveAttachmentsRoot(),
|
||||
};
|
||||
|
|
@ -1,8 +0,0 @@
|
|||
import { attachmentsRootProvider } from "./attachment.provider";
|
||||
import { Module } from "@nestjs/common";
|
||||
|
||||
@Module({
|
||||
providers: [attachmentsRootProvider],
|
||||
exports: [attachmentsRootProvider],
|
||||
})
|
||||
export class AppConfigModule {}
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
import { IsInt, IsOptional, IsString, Max, Min } from "class-validator";
|
||||
|
||||
export class AdminSearchDto {
|
||||
@IsOptional() @IsString() owner_type?: string;
|
||||
@IsOptional() @IsString() owner_id?: string;
|
||||
@IsOptional() date_from?: string;
|
||||
@IsOptional() date_to?: string;
|
||||
@IsOptional() @IsInt() @Min(1) page?: number = 1;
|
||||
@IsOptional() @IsInt() @Min(1) @Max(200) page_size?: number = 50;
|
||||
}
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
export class UploadMetaAttachmentsDto {
|
||||
owner_type!: string;
|
||||
owner_id!: string;
|
||||
retention_policy!: 'EXPENSE_7Y' | 'TICKET_2Y' | 'PROFILE_KEEP_LAST3';
|
||||
created_by!: string;
|
||||
}
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
import { Injectable } from "@nestjs/common";
|
||||
import { Cron } from "@nestjs/schedule";
|
||||
import { startOfYear } from "src/time-and-attendance/attachments/cas.util";
|
||||
import { PrismaPostgresService } from "prisma/postgres/prisma-postgres.service";
|
||||
|
||||
@Injectable()
|
||||
export class AttachmentArchivalService {
|
||||
private readonly batch_size = Number(process.env.ARCHIVE_BATCH_SIZE || 1000);
|
||||
private readonly cron_expression = process.env.ARCHIVE_CRON || '0 3 * * 1';
|
||||
|
||||
constructor(private readonly prisma: PrismaPostgresService) { }
|
||||
|
||||
|
||||
@Cron(function (this: AttachmentArchivalService) { return this.cron_expression; } as any)
|
||||
async runScheduled() {
|
||||
await this.archiveCutoffToStartOfYear();
|
||||
}
|
||||
|
||||
//archive everything before current year
|
||||
async archiveCutoffToStartOfYear() {
|
||||
const cutoff = startOfYear();
|
||||
console.log(`Archival: cutoff=${cutoff.toISOString()} batch=${this.batch_size}`);
|
||||
|
||||
let moved = 0, total = 0, i = 0;
|
||||
do {
|
||||
moved = await this.archiveBatch(cutoff, this.batch_size);
|
||||
total += moved;
|
||||
i++;
|
||||
if (moved > 0) console.log(`Batch #${i}: moved ${moved}`);
|
||||
} while (moved === this.batch_size);
|
||||
|
||||
console.log(`Archival done: total moved : ${total}`);
|
||||
return { moved: total };
|
||||
}
|
||||
|
||||
//only moves table content to archive and not blobs.
|
||||
private async archiveBatch(cutoff: Date, batch_size: number): Promise<number> {
|
||||
const moved = await this.prisma.client.$executeRaw `
|
||||
WITH moved AS (
|
||||
DELETE FROM "attachments"
|
||||
WHERE id IN (
|
||||
SELECT id FROM "attachments"
|
||||
WHERE created_at < ${cutoff}
|
||||
ORDER BY id
|
||||
LIMIT ${batch_size}
|
||||
)
|
||||
RETURNING id, sha256, owner_type, owner_id, original_name, status, retention_policy, created_by, created_at
|
||||
)
|
||||
INSERT INTO archive.attachments_archive
|
||||
(id, sha256, owner_type, owner_id, original_name, status, retention_policy, created_by, created_at)
|
||||
SELECT * FROM moved;`;
|
||||
return Number(moved) || 0;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
import { Injectable } from "@nestjs/common";
|
||||
import { Result } from "src/common/errors/result-error.factory";
|
||||
import { PrismaPostgresService } from "prisma/postgres/prisma-postgres.service";
|
||||
|
||||
@Injectable()
|
||||
export class AttachmentDeleteService {
|
||||
constructor(private readonly prisma: PrismaPostgresService) { }
|
||||
|
||||
async deleteAttachment(id: string): Promise<Result<boolean, string>> {
|
||||
await this.prisma.client.$transaction(async (tx) => {
|
||||
const attachment = await tx.attachments.findUnique({ where: { id: Number(id) } });
|
||||
if (!attachment) return { success: false, error: 'ATTACHMENT_NOT_FOUND' };
|
||||
|
||||
// soft-delete
|
||||
await tx.attachments.update({ where: { id: Number(id) }, data: { status: 'DELETED' } });
|
||||
|
||||
// decrement refcount
|
||||
const dec = await tx.$executeRaw`
|
||||
UPDATE "blobs" SET refcount = refcount - 1
|
||||
WHERE sha256 = ${attachment.sha256} AND refcount > 0;`;
|
||||
|
||||
return { ok: true, decremented: dec > 0 };
|
||||
});
|
||||
return { success: true, data: true };
|
||||
}
|
||||
}
|
||||
|
|
@ -1,100 +0,0 @@
|
|||
import { Injectable } from "@nestjs/common";
|
||||
import { Response } from "express";
|
||||
import { AdminSearchDto } from "src/time-and-attendance/attachments/dtos/search-filters.dto";
|
||||
import { PrismaPostgresService } from "prisma/postgres/prisma-postgres.service";
|
||||
import { resolveAttachmentsRoot } from "src/time-and-attendance/attachments/config/attachment.config";
|
||||
import * as path from 'node:path';
|
||||
import { promises as fsp } from 'node:fs';
|
||||
import { createReadStream } from "node:fs";
|
||||
import { fileTypeFromFile } from "file-type";
|
||||
import { Result } from "src/common/errors/result-error.factory";
|
||||
|
||||
@Injectable()
|
||||
export class AttachmentGetService {
|
||||
constructor(
|
||||
private readonly prisma: PrismaPostgresService,
|
||||
|
||||
) { }
|
||||
|
||||
async getListVariants(id: string): Promise<Result<any, string>> {
|
||||
const num_id = Number(id);
|
||||
if (!Number.isFinite(num_id)) return { success: false, error: 'INVALID_ATTACHMENTS' };
|
||||
const variants = await this.prisma.client.attachmentVariants.findMany({
|
||||
where: { attachment_id: num_id },
|
||||
orderBy: { variant: 'asc' },
|
||||
select: { variant: true, bytes: true, width: true, height: true, path: true, created_at: true },
|
||||
});
|
||||
return { success: true, data: variants };
|
||||
}
|
||||
|
||||
async searchAttachmentWithFilters(dto: AdminSearchDto): Promise<Result<any, string>> {
|
||||
const where: any = {};
|
||||
if (dto.owner_type) where.owner_type = dto.owner_type;
|
||||
if (dto.owner_id) where.owner_id = dto.owner_id;
|
||||
|
||||
if (dto.date_from || dto.date_to) {
|
||||
where.created_at = {};
|
||||
if (dto.date_from) where.created_at.gte = new Date(dto.date_from + 'T00:00:00Z');
|
||||
if (dto.date_to) where.created_at.lte = new Date(dto.date_to + 'T23:59:59Z');
|
||||
}
|
||||
|
||||
const page = dto.page ?? 1;
|
||||
const page_size = dto.page_size ?? 50;
|
||||
const skip = (page - 1) * page_size;
|
||||
const take = page_size;
|
||||
|
||||
const [items, total] = await this.prisma.client.$transaction([
|
||||
this.prisma.client.attachments.findMany({
|
||||
where,
|
||||
orderBy: { created_at: 'desc' },
|
||||
skip, take,
|
||||
include: {
|
||||
blob: {
|
||||
select: { mime: true, size: true, storage_path: true, sha256: true },
|
||||
},
|
||||
},
|
||||
}),
|
||||
this.prisma.client.attachments.count({ where }),
|
||||
]);
|
||||
|
||||
return { success: true, data: { page, page_size: take, total, items } };
|
||||
}
|
||||
|
||||
|
||||
async findAttachmentById(id: string, variant: string | undefined, res: Response): Promise<Result<boolean, string>> {
|
||||
const num_id = Number(id);
|
||||
if (!Number.isFinite(num_id)) return { success: false, error: 'INVALID_ATTACHMENTS' };
|
||||
|
||||
const attachment = await this.prisma.client.attachments.findUnique({
|
||||
where: { id: num_id },
|
||||
include: { blob: true },
|
||||
});
|
||||
if (!attachment) return { success: false, error: 'ATTACHMENT_NOT_FOUND' };
|
||||
|
||||
const relative = variant ? `${attachment.blob.storage_path}.${variant}` : attachment.blob.storage_path;
|
||||
const abs = path.join(resolveAttachmentsRoot(), relative);
|
||||
|
||||
let stat;
|
||||
try {
|
||||
stat = await fsp.stat(abs);
|
||||
} catch {
|
||||
return { success: false, error: 'INVALID_FILE_PATH' };
|
||||
}
|
||||
|
||||
let mime = attachment.blob.mime;
|
||||
try {
|
||||
const kind = await fileTypeFromFile(abs);
|
||||
if (kind?.mime) mime = kind.mime;
|
||||
} catch { }
|
||||
res.set('Content-Type', mime);
|
||||
res.set('Content-Length', String(stat.size));
|
||||
res.set('ETag', `"sha256-${attachment.blob.sha256}${variant ? '.' + variant : ''}"`);
|
||||
res.set('Last-Modified', stat.mtime.toUTCString());
|
||||
res.set('Cache-Control', 'private, max-age=31536000, immutable');
|
||||
res.set('X-Content-Type-Options', 'nosniff');
|
||||
|
||||
createReadStream(abs).pipe(res);
|
||||
return { success: true, data: true };
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,87 +0,0 @@
|
|||
import { Injectable } from "@nestjs/common";
|
||||
import { allowedMimes } from "src/time-and-attendance/attachments/upload.config";
|
||||
import { UploadMetaAttachmentsDto } from "src/time-and-attendance/attachments/dtos/upload-meta-attachments.dto";
|
||||
import { Readable } from "node:stream";
|
||||
import { PrismaPostgresService } from "prisma/postgres/prisma-postgres.service";
|
||||
import { fileTypeFromBuffer } from "file-type";
|
||||
import { Result } from "src/common/errors/result-error.factory";
|
||||
import { DiskStorageService } from "src/time-and-attendance/attachments/services/disk-storage.service";
|
||||
import { VariantsQueue } from "src/time-and-attendance/attachments/services/variants.queue";
|
||||
|
||||
/**
 * Write side of the attachments feature: validates the uploaded file's real
 * mime type, stores its bytes content-addressed on disk, records blob and
 * attachment rows transactionally, then enqueues variant generation.
 */
@Injectable()
export class AttachmentUploadService {
  constructor(
    private readonly prisma: PrismaPostgresService,
    private readonly disk: DiskStorageService,
    private readonly variantsQ: VariantsQueue,
  ) { }

  /**
   * Accepts a memory-buffered multipart file plus optional metadata and
   * returns the created attachment's identifying data on success.
   */
  async uploadAttachment(file?: Express.Multer.File, meta?: UploadMetaAttachmentsDto): Promise<Result<any, string>> {
    if (!file) return { success: false, error: 'FILE_NOT_FOUND' };

    // Magic-byte detection — the client-declared mimetype alone is not trusted.
    const kind = await fileTypeFromBuffer(file.buffer).catch(() => null);
    const detected_mime = kind?.mime || file.mimetype || 'application/octet-stream';

    // Strict whitelist of accepted types.
    if (!allowedMimes().includes(detected_mime)) {
      return { success: false, error: 'INVALID_ATTACHMENT_TYPE' };
    }

    // Persist to disk first (sha256 hashing + CAS layout + dedup).
    const stream = Readable.from(file.buffer);
    const { sha256, storage_path, size } = await this.disk.saveStreamAndHash(stream);

    const now = new Date();
    const attachment = await this.prisma.client.$transaction(async (tx) => {
      // Upsert the blob: first upload creates it, re-uploads of identical
      // content only bump the refcount.
      await tx.blobs.upsert({
        where: { sha256 },
        create: {
          sha256,
          storage_path: storage_path,
          size,
          mime: detected_mime,
          refcount: 1,
          created_at: now,
        },
        update: { // only increment, does not change the storage path
          refcount: { increment: 1 },
          mime: detected_mime, // refresh mime and size to the latest upload
          size,
        },
      });

      // One attachment row per upload, even when the blob is shared.
      const att = await tx.attachments.create({
        data: {
          sha256,
          owner_type: meta?.owner_type ?? 'EXPENSE',
          owner_id: meta?.owner_id ?? 'unknown',
          original_name: file.originalname,
          status: 'ACTIVE',
          retention_policy: (meta?.retention_policy ?? 'EXPENSE_7Y') as any,
          created_by: meta?.created_by ?? 'system',
          created_at: now,
        },
      });
      return att;
    });

    // Kick off thumbnail/resize generation (a no-op for non-image mimes).
    await this.variantsQ.enqueue(attachment.id, detected_mime);

    return {
      success: true,
      data: {
        ok: true,
        id: attachment.id,
        sha256,
        storage_path: storage_path,
        size,
        mime: detected_mime,
        original_name: file.originalname,
        owner_type: attachment.owner_type,
        owner_id: attachment.owner_id,
      }
    };
  }
}
|
||||
|
|
@ -1,60 +0,0 @@
|
|||
import { Injectable } from '@nestjs/common';
|
||||
import { createHash } from 'node:crypto';
|
||||
import { promises as fsp } from 'node:fs';
|
||||
import { createWriteStream, statSync, existsSync } from 'node:fs';
|
||||
import { join, dirname } from 'node:path';
|
||||
import { pipeline } from 'node:stream/promises';
|
||||
import { ATT_TMP_DIR } from 'src/time-and-attendance/attachments/config/attachment.config';
|
||||
import { casPathFor, getAbsolutePath } from 'src/time-and-attendance/attachments/cas.util';
|
||||
|
||||
/** Result of persisting a stream: content hash, CAS-relative path, byte size. */
export type SaveResult = { sha256: string, storage_path: string, size: number };

/**
 * Content-addressed disk storage: streams uploads into <ROOT>/_tmp while
 * hashing, then moves them to their sha256-derived final location,
 * deduplicating identical content.
 */
@Injectable()
export class DiskStorageService {
  // async exists(storagePathRel: string) {
  //   try {
  //     statSync(getAbsolutePath(storagePathRel));
  //     return true;
  //   } catch {
  //     return false;
  //   }
  // }

  /**
   * Writes the stream to a temp file while computing its sha256, then
   * renames it into the CAS tree (atomic on the same volume). When a file
   * with the same hash already exists, the temp copy is discarded instead.
   */
  async saveStreamAndHash(input: NodeJS.ReadableStream): Promise<SaveResult> {
    // 1 - write into ROOT/_tmp while hashing the passing chunks
    const tmpDir = ATT_TMP_DIR();
    await fsp.mkdir(tmpDir, { recursive: true });
    const tmpPath = join(tmpDir, `up_${Date.now()}_${Math.random().toString(36).slice(2)}`);

    const hash = createHash('sha256');
    const tmpOut = createWriteStream(tmpPath);
    // NOTE(review): hashing via a 'data' listener alongside pipeline() works
    // for the buffer-backed streams used by the upload service, but a
    // Transform/PassThrough hasher would be more robust for arbitrary sources.
    input.on('data', (chunk) => hash.update(chunk));
    await pipeline(input, tmpOut); //await end of writing stream

    const sha = hash.digest('hex');
    const rel = casPathFor(sha);
    const finalAbs = getAbsolutePath(rel);

    // 2 - if there is no destination => move (atomic renaming on the same volume)
    if (!existsSync(finalAbs)) {
      await fsp.mkdir(dirname(finalAbs), { recursive: true });
      try {
        await fsp.rename(tmpPath, finalAbs);
      } catch (e) {
        // Lost a race: a concurrent upload with the same hash landed first.
        if (existsSync(finalAbs)) {
          await fsp.rm(tmpPath, { force: true });
        } else {
          throw e;
        }
      }
    } else {
      // Content already stored — drop the duplicate temp file.
      await fsp.rm(tmpPath, { force: true });
    }

    const size = statSync(finalAbs).size;
    return { sha256: sha, storage_path: rel, size };
  }
}
|
||||
|
|
@ -1,78 +0,0 @@
|
|||
import { Injectable, Logger } from "@nestjs/common";
|
||||
import { Cron } from "@nestjs/schedule";
|
||||
import { PrismaPostgresService } from 'prisma/postgres/prisma-postgres.service';
|
||||
import * as path from 'node:path';
|
||||
import { promises as fsp } from 'node:fs';
|
||||
import { resolveAttachmentsRoot } from "src/time-and-attendance/attachments/config/attachment.config";
|
||||
|
||||
@Injectable()
|
||||
export class GarbargeCollectorService {
|
||||
private readonly logger = new Logger(GarbargeCollectorService.name);
|
||||
|
||||
//.env refs
|
||||
private readonly batch_size = Number(process.env.GC_BATCH_SIZE || 500);
|
||||
private readonly cron_expression = process.env.GC_CRON || '15 4 * * *'; // everyday at 04:15 AM
|
||||
|
||||
//fetchs root of storage
|
||||
private readonly root = resolveAttachmentsRoot();
|
||||
|
||||
constructor(private readonly prisma: PrismaPostgresService) { }
|
||||
|
||||
//planif for the Cronjob
|
||||
@Cron(function (this: GarbargeCollectorService) { return this.cron_expression; } as any)
|
||||
async runScheduled() {
|
||||
await this.collect();
|
||||
}
|
||||
|
||||
//Manage Garbage collecting by batch of elements until a batch != full
|
||||
async collect() {
|
||||
let total = 0, round = 0;
|
||||
//infinit loop (;;) with break
|
||||
for (; ;) {
|
||||
round++;
|
||||
const num = await this.collectBatch();
|
||||
total += num;
|
||||
this.logger.log(`Garbage Collector round #${round} removed ${num}`);
|
||||
if (num < this.batch_size) break; //breaks if not a full batch
|
||||
}
|
||||
this.logger.log(`Garbage Collecting done: total removed ${total}`);
|
||||
return { removed: total };
|
||||
}
|
||||
|
||||
//Manage a single lot of orphan blobs
|
||||
private async collectBatch(): Promise<number> {
|
||||
const blobs = await this.prisma.client.blobs.findMany({
|
||||
where: { refcount: { lte: 0 } },
|
||||
select: { sha256: true, storage_path: true },
|
||||
take: this.batch_size,
|
||||
});
|
||||
if (blobs.length === 0) return 0;
|
||||
|
||||
// delete original file and all its variants <hash> in the same file
|
||||
await Promise.all(
|
||||
blobs.map(async (blob) => {
|
||||
const absolute_path = path.join(this.root, blob.storage_path);
|
||||
await this.deleteFileIfExists(absolute_path); //tries to delete original file if found
|
||||
|
||||
const dir = path.dirname(absolute_path);
|
||||
const base = path.basename(absolute_path);
|
||||
try {
|
||||
const entries = await fsp.readdir(dir, { withFileTypes: true });
|
||||
const targets = entries.filter(entry => entry.isFile() && entry.name.startsWith(base + '.'))
|
||||
.map(entry => path.join(dir, entry.name));
|
||||
//deletes all variants
|
||||
await Promise.all(targets.map(target => this.deleteFileIfExists(target)));
|
||||
} catch { }
|
||||
})
|
||||
);
|
||||
//deletes blobs lignes if file is deleted
|
||||
const hashes = blobs.map(blob => blob.sha256);
|
||||
await this.prisma.client.blobs.deleteMany({ where: { sha256: { in: hashes } } });
|
||||
return blobs.length;
|
||||
}
|
||||
|
||||
//helper: deletes path if exists and ignore errors
|
||||
private async deleteFileIfExists(path: string) {
|
||||
try { await fsp.unlink(path); } catch { }
|
||||
}
|
||||
}
|
||||
|
|
@ -1,21 +0,0 @@
|
|||
import { Injectable } from "@nestjs/common";
|
||||
import { Queue } from "bullmq";
|
||||
|
||||
@Injectable()
|
||||
export class VariantsQueue {
|
||||
private queue: Queue;
|
||||
|
||||
constructor() {
|
||||
const name = `${process.env.BULL_PREFIX || 'attachments'}:variants`;
|
||||
this.queue = new Queue(name, { connection: { url: process.env.REDIS_URL! } });
|
||||
}
|
||||
|
||||
enqueue(attachment_id: number, mime: string) {
|
||||
if (!mime.startsWith('image/')) return Promise.resolve();
|
||||
return this.queue.add(
|
||||
'generate',
|
||||
{ attachment_id, mime },
|
||||
{ attempts: 3, backoff: { type: 'exponential', delay: 2000 } }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
export const maxUploadBytes = () =>
|
||||
(Number(process.env.MAX_UPLOAD_MB || 25)) * 1024 * 1024;
|
||||
|
||||
export const allowedMimes = () =>
|
||||
(process.env.ALLOWED_MIME || 'image/jpeg,image/png,image/webp,application/pdf')
|
||||
.split(',').map(s => s.trim()).filter(Boolean);
|
||||
|
|
@ -1,56 +0,0 @@
|
|||
import 'dotenv/config';
|
||||
import { Worker } from 'bullmq';
|
||||
import sharp from 'sharp';
|
||||
import { PrismaClient } from 'prisma/postgres/generated/prisma/client/postgres/client';
|
||||
import * as path from 'node:path';
|
||||
import { promises as fsp } from 'node:fs';
|
||||
import { resolveAttachmentsRoot } from 'src/time-and-attendance/attachments/config/attachment.config';
|
||||
import { adapterPostgres } from 'prisma.config.postgres';
|
||||
|
||||
// Standalone BullMQ worker: consumes the "<prefix>:variants" queue and
// generates resized/re-encoded variants for image attachments.
const prisma = new PrismaClient({ adapter: adapterPostgres });
const q_name = `${process.env.BULL_PREFIX || 'attachments'}:variants`;
const root = resolveAttachmentsRoot();

// Variant recipes: output suffix + sharp pipeline. rotate() applies the
// EXIF orientation before resizing.
const variants = [
  { name: 'thumb.jpeg', build: (s: sharp.Sharp) => s.rotate().jpeg({ quality: 80 }).resize({ width: 128 }) },
  { name: '256w.webp', build: (s: sharp.Sharp) => s.rotate().webp({ quality: 80 }).resize({ width: 256 }) },
  { name: '1024w.webp', build: (s: sharp.Sharp) => s.rotate().webp({ quality: 82 }).resize({ width: 1024 }) },
]

new Worker(q_name, async job => {
  // Accept both key spellings in the job payload for compatibility.
  const attachment_id: number = job.data.attachmentId ?? job.data.attachment_id;
  if (!attachment_id) return;

  const attachment = await prisma.attachments.findUnique({
    where: { id: attachment_id },
    include: { blob: true },
  });
  if (!attachment) return; // row deleted since enqueue — nothing to do

  const source_abs = path.join(root, attachment.blob.storage_path);

  for (const variant of variants) {
    // Variants are stored next to the original as "<path>.<variant>".
    const relative = `${attachment.blob.storage_path}.${variant.name}`;
    const out_Abs = path.join(root, relative);

    // Idempotency: skip variants that already exist on disk.
    try { await fsp.stat(out_Abs); continue; } catch { }

    await fsp.mkdir(path.dirname(out_Abs), { recursive: true });

    // generate the variant file
    await variant.build(sharp(source_abs)).toFile(out_Abs);

    // Record the generated file's metadata (size, dimensions) in the DB.
    const meta = await sharp(out_Abs).metadata();
    const bytes = (await fsp.stat(out_Abs)).size;
    await prisma.attachmentVariants.upsert({
      where: { attachment_id_variant: { attachment_id: attachment_id, variant: variant.name } },
      update: { path: relative, bytes, width: meta.width ?? null, height: meta.height ?? null },
      create: { path: relative, bytes, width: meta.width ?? null, height: meta.height ?? null, attachment_id: attachment_id, variant: variant.name },
    } as any);
  }
}, {
  connection: { url: process.env.REDIS_URL }, concurrency: 3
}
);
|
||||
|
|
@ -8,7 +8,6 @@ export class ExpenseDto {
|
|||
@IsBoolean() is_approved: boolean;
|
||||
@IsOptional() @Type(()=> Number) amount?: number;
|
||||
@IsOptional() @Type(()=> Number) mileage?: number;
|
||||
@IsOptional() @IsInt() attachment?: number;
|
||||
@IsOptional() @IsInt() timesheet_id?: number;
|
||||
@IsOptional() @IsString() @MaxLength(280) comment: string;
|
||||
@IsOptional() @IsString() @MaxLength(280) supervisor_comment?: string
|
||||
|
|
|
|||
|
|
@ -18,7 +18,6 @@ export const parseOptionalNumber = (value: unknown, field: string) => {
|
|||
|
||||
//makes sure that comments are the right length the date is of Date type
|
||||
export const normalizeAndParseExpenseDto = async (dto: ExpenseDto): Promise<Result<NormalizedExpense, string>> => {
|
||||
const attachment = parseOptionalNumber(dto.attachment, "attachment");
|
||||
const mileage = parseOptionalNumber(dto.mileage, "mileage");
|
||||
const amount = parseOptionalNumber(dto.amount, "amount");
|
||||
|
||||
|
|
@ -35,7 +34,6 @@ export const normalizeAndParseExpenseDto = async (dto: ExpenseDto): Promise<Resu
|
|||
comment,
|
||||
supervisor_comment,
|
||||
amount,
|
||||
attachment,
|
||||
mileage,
|
||||
}
|
||||
};
|
||||
|
|
|
|||
|
|
@ -63,7 +63,6 @@ export class ExpenseCreateService {
|
|||
date: toStringFromDate(expense.date),
|
||||
amount: expense.amount?.toNumber() ?? undefined,
|
||||
mileage: expense.mileage?.toNumber(),
|
||||
attachment: expense.attachment ?? undefined,
|
||||
supervisor_comment: expense.supervisor_comment ?? undefined,
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -64,7 +64,6 @@ export class ExpenseUpdateService {
|
|||
date: toStringFromDate(expense.date),
|
||||
amount: expense.amount?.toNumber(),
|
||||
mileage: expense.mileage?.toNumber(),
|
||||
attachment: expense.attachment ?? undefined,
|
||||
supervisor_comment: expense.supervisor_comment ?? undefined,
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -84,7 +84,7 @@ export class GetTimesheetsOverviewService {
|
|||
include: {
|
||||
employee: { include: { user: true } },
|
||||
shift: { include: { bank_code: true }, orderBy: { start_time: 'asc' } },
|
||||
expense: { include: { bank_code: true, attachment_record: true }, orderBy: [{ date: 'asc' }, { bank_code_id: 'desc' }] },
|
||||
expense: { include: { bank_code: true }, orderBy: [{ date: 'asc' }, { bank_code_id: 'desc' }] },
|
||||
},
|
||||
orderBy: { start_date: 'asc' },
|
||||
});
|
||||
|
|
@ -98,7 +98,7 @@ export class GetTimesheetsOverviewService {
|
|||
include: {
|
||||
employee: { include: { user: true } },
|
||||
shift: { include: { bank_code: true } },
|
||||
expense: { include: { bank_code: true, attachment_record: true } },
|
||||
expense: { include: { bank_code: true } },
|
||||
},
|
||||
});
|
||||
if (row) return row;
|
||||
|
|
@ -116,7 +116,7 @@ export class GetTimesheetsOverviewService {
|
|||
include: {
|
||||
employee: { include: { user: true } },
|
||||
shift: { include: { bank_code: true } },
|
||||
expense: { include: { bank_code: true, attachment_record: true, } },
|
||||
expense: { include: { bank_code: true } },
|
||||
},
|
||||
});
|
||||
return row!;
|
||||
|
|
|
|||
|
|
@ -67,7 +67,6 @@ export class Expense {
|
|||
@IsString() comment: string;
|
||||
@Type(() => Number) @IsOptional() amount?: number;
|
||||
@Type(() => Number) @IsOptional() mileage?: number;
|
||||
@IsString() @IsOptional() attachment?: string;
|
||||
@IsOptional() @IsInt() id?: number | null;
|
||||
@IsString() @IsOptional() supervisor_comment?: string | null;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ export const mapOneTimesheet = async (timesheet: Prisma.TimesheetsGetPayload<{
|
|||
include: {
|
||||
employee: { include: { user } },
|
||||
shift: { include: { bank_code }, orderBy: { start_time: 'asc' } },
|
||||
expense: { include: { bank_code, attachment_record } },
|
||||
expense: { include: { bank_code } },
|
||||
}
|
||||
}>): Promise<Timesheet> => {
|
||||
//converts string to UTC date format
|
||||
|
|
@ -23,7 +23,7 @@ export const mapOneTimesheet = async (timesheet: Prisma.TimesheetsGetPayload<{
|
|||
shifts_by_date.set(date_string, arr);
|
||||
}
|
||||
//map of expenses by days
|
||||
const expenses_by_date = new Map<string, Prisma.ExpensesGetPayload<{ include: { bank_code: {}, attachment_record } }>[]>();
|
||||
const expenses_by_date = new Map<string, Prisma.ExpensesGetPayload<{ include: { bank_code: {} } }>[]>();
|
||||
for (const expense of timesheet.expense) {
|
||||
const date_string = toStringFromDate(expense.date);
|
||||
const arr = expenses_by_date.get(date_string) ?? [];
|
||||
|
|
@ -60,7 +60,6 @@ export const mapOneTimesheet = async (timesheet: Prisma.TimesheetsGetPayload<{
|
|||
mileage: expense.mileage != null ? Number(expense.mileage) : undefined,
|
||||
id: expense.id ?? null,
|
||||
timesheet_id: expense.timesheet_id,
|
||||
attachment: expense.attachment_record ? String(expense.attachment_record.id) : undefined,
|
||||
is_approved: expense.is_approved ?? false,
|
||||
comment: expense.comment ?? '',
|
||||
supervisor_comment: expense.supervisor_comment,
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ export const expense_select = {
|
|||
bank_code: {
|
||||
select: { type: true, id: true }
|
||||
},
|
||||
attachment: true,
|
||||
date: true,
|
||||
amount: true,
|
||||
mileage: true,
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user