From 6316dccf76a00c05c9663b3cdca4759994f34ec8 Mon Sep 17 00:00:00 2001
From: mertalev <101130780+mertalev@users.noreply.github.com>
Date: Wed, 12 Jun 2024 19:28:39 -0400
Subject: [PATCH] use preview for videos

---
 server/src/services/person.service.spec.ts |  26 ++++++
 server/src/services/person.service.ts      | 100 ++++++++++++---
 server/test/fixtures/asset.stub.ts         |   2 +
 3 files changed, 88 insertions(+), 40 deletions(-)

diff --git a/server/src/services/person.service.spec.ts b/server/src/services/person.service.spec.ts
index 56447c8d203a3..bb76bc38a3d66 100644
--- a/server/src/services/person.service.spec.ts
+++ b/server/src/services/person.service.spec.ts
@@ -949,6 +949,32 @@ describe(PersonService.name, () => {
         },
       );
     });
+
+    it('should use preview path for videos', async () => {
+      personMock.getById.mockResolvedValue({ ...personStub.primaryPerson, faceAssetId: faceStub.end.assetId });
+      personMock.getFaceByIdWithAssets.mockResolvedValue(faceStub.end);
+      assetMock.getById.mockResolvedValue(assetStub.video);
+      mediaMock.getImageDimensions.mockResolvedValue({ width: 2560, height: 1440 });
+
+      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
+
+      expect(mediaMock.generateThumbnail).toHaveBeenCalledWith(
+        assetStub.video.previewPath,
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+        {
+          format: 'jpeg',
+          size: 250,
+          quality: 80,
+          colorspace: Colorspace.P3,
+          crop: {
+            left: 1741,
+            top: 851,
+            width: 588,
+            height: 588,
+          },
+        },
+      );
+    });
   });
 
   describe('mergePerson', () => {
diff --git a/server/src/services/person.service.ts b/server/src/services/person.service.ts
index 6d3e4c34a167c..e3e78b48f2122 100644
--- a/server/src/services/person.service.ts
+++ b/server/src/services/person.service.ts
@@ -22,6 +22,7 @@ import {
   mapFaces,
   mapPerson,
 } from 'src/dtos/person.dto';
+import { AssetEntity, AssetType } from 'src/entities/asset.entity';
 import { PersonPathType } from 'src/entities/move.entity';
 import { PersonEntity } from 'src/entities/person.entity';
 import { IAccessRepository } from 'src/interfaces/access.interface';
@@ -39,7 +40,7 @@ import {
   QueueName,
 } from 'src/interfaces/job.interface';
 import { ILoggerRepository } from 'src/interfaces/logger.interface';
-import { IMachineLearningRepository } from 'src/interfaces/machine-learning.interface';
+import { BoundingBox, IMachineLearningRepository } from 'src/interfaces/machine-learning.interface';
 import { CropOptions, IMediaRepository, ImageDimensions } from 'src/interfaces/media.interface';
 import { IMoveRepository } from 'src/interfaces/move.interface';
 import { IPersonRepository, UpdateFacesData } from 'src/interfaces/person.interface';
@@ -509,61 +510,30 @@ export class PersonService {
       boundingBoxX2: x2,
       boundingBoxY1: y1,
       boundingBoxY2: y2,
-      imageWidth,
-      imageHeight,
+      imageWidth: oldWidth,
+      imageHeight: oldHeight,
     } = face;
 
     const asset = await this.assetRepository.getById(assetId, { exifInfo: true });
-    if (!asset?.exifInfo?.exifImageHeight || !asset.exifInfo.exifImageWidth) {
-      this.logger.error(`Could not generate person thumbnail: asset ${assetId} dimensions are unknown`);
+    if (!asset) {
+      this.logger.error(`Could not generate person thumbnail: asset ${assetId} does not exist`);
       return JobStatus.FAILED;
     }
 
-    this.logger.verbose(`Cropping face for person: ${person.id}`);
+    const { width, height, inputPath } = await this.getInputDimensions(asset);
+
     const thumbnailPath = StorageCore.getPersonThumbnailPath(person);
     this.storageCore.ensureFolders(thumbnailPath);
 
-    const { width: exifWidth, height: exifHeight } = this.withOrientation(asset.exifInfo.orientation as Orientation, {
-      width: asset.exifInfo.exifImageWidth,
-      height: asset.exifInfo.exifImageHeight,
-    });
-
-    const widthScale = exifWidth / imageWidth;
-    const heightScale = exifHeight / imageHeight;
-
-    const halfWidth = (widthScale * (x2 - x1)) / 2;
-    const halfHeight = (heightScale * (y2 - y1)) / 2;
-
-    const middleX = Math.round(widthScale * x1 + halfWidth);
-    const middleY = Math.round(heightScale * y1 + halfHeight);
-
-    // zoom out 10%
-    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
-
-    // get the longest distance from the center of the image without overflowing
-    const newHalfSize = Math.min(
-      middleX - Math.max(0, middleX - targetHalfSize),
-      middleY - Math.max(0, middleY - targetHalfSize),
-      Math.min(exifWidth - 1, middleX + targetHalfSize) - middleX,
-      Math.min(exifHeight - 1, middleY + targetHalfSize) - middleY,
-    );
-
-    const cropOptions: CropOptions = {
-      left: middleX - newHalfSize,
-      top: middleY - newHalfSize,
-      width: newHalfSize * 2,
-      height: newHalfSize * 2,
-    };
-
     const thumbnailOptions = {
       format: ImageFormat.JPEG,
       size: FACE_THUMBNAIL_SIZE,
       colorspace: image.colorspace,
       quality: image.quality,
-      crop: cropOptions,
+      crop: this.getCrop({ old: { width: oldWidth, height: oldHeight }, new: { width, height } }, { x1, y1, x2, y2 }),
    } as const;
 
-    await this.mediaRepository.generateThumbnail(asset.originalPath, thumbnailPath, thumbnailOptions);
+    await this.mediaRepository.generateThumbnail(inputPath, thumbnailPath, thumbnailOptions);
     await this.repository.update({ id: person.id, thumbnailPath });
 
     return JobStatus.SUCCESS;
@@ -631,6 +601,27 @@
     return person;
   }
 
+  private async getInputDimensions(asset: AssetEntity): Promise<ImageDimensions & { inputPath: string }> {
+    if (!asset.exifInfo?.exifImageHeight || !asset.exifInfo.exifImageWidth) {
+      throw new Error(`Asset ${asset.id} dimensions are unknown`);
+    }
+
+    if (!asset.previewPath) {
+      throw new Error(`Asset ${asset.id} has no preview path`);
+    }
+
+    if (asset.type === AssetType.IMAGE) {
+      const { width, height } = this.withOrientation(asset.exifInfo.orientation as Orientation, {
+        width: asset.exifInfo.exifImageWidth,
+        height: asset.exifInfo.exifImageHeight,
+      });
+      return { width, height, inputPath: asset.originalPath };
+    }
+
+    const { width, height } = await this.mediaRepository.getImageDimensions(asset.previewPath);
+    return { width, height, inputPath: asset.previewPath };
+  }
+
   private withOrientation(orientation: Orientation, { width, height }: ImageDimensions): ImageDimensions {
     switch (orientation) {
       case Orientation.MirrorHorizontalRotate270CW:
@@ -644,4 +635,33 @@
       }
     }
   }
+
+  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
+    const widthScale = dims.new.width / dims.old.width;
+    const heightScale = dims.new.height / dims.old.height;
+
+    const halfWidth = (widthScale * (x2 - x1)) / 2;
+    const halfHeight = (heightScale * (y2 - y1)) / 2;
+
+    const middleX = Math.round(widthScale * x1 + halfWidth);
+    const middleY = Math.round(heightScale * y1 + halfHeight);
+
+    // zoom out 10%
+    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
+
+    // get the longest distance from the center of the image without overflowing
+    const newHalfSize = Math.min(
+      middleX - Math.max(0, middleX - targetHalfSize),
+      middleY - Math.max(0, middleY - targetHalfSize),
+      Math.min(dims.new.width - 1, middleX + targetHalfSize) - middleX,
+      Math.min(dims.new.height - 1, middleY + targetHalfSize) - middleY,
+    );
+
+    return {
+      left: middleX - newHalfSize,
+      top: middleY - newHalfSize,
+      width: newHalfSize * 2,
+      height: newHalfSize * 2,
+    };
+  }
 }
diff --git a/server/test/fixtures/asset.stub.ts b/server/test/fixtures/asset.stub.ts
index 12c4e7606e80b..01d5e8c119e2e 100644
--- a/server/test/fixtures/asset.stub.ts
+++ b/server/test/fixtures/asset.stub.ts
@@ -436,6 +436,8 @@ export const assetStub = {
     sidecarPath: null,
     exifInfo: {
       fileSizeInByte: 100_000,
+      exifImageHeight: 2160,
+      exifImageWidth: 3840,
     } as ExifEntity,
     deletedAt: null,
     duplicateId: null,
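
Note for reviewers, outside the patch itself: the new getCrop helper is the same crop math handleGeneratePersonThumbnail previously ran inline, just parameterized over the dimensions of whichever image is actually cropped (the oriented original for photos, the generated preview for videos). Below is a minimal standalone sketch of that math for reference; Dimensions, FaceBox, and cropFromScaledBox are illustrative names that do not exist in the codebase.

// Illustration only: mirrors getCrop() from the patch with hypothetical local names.
interface Dimensions {
  width: number;
  height: number;
}

interface FaceBox {
  x1: number;
  y1: number;
  x2: number;
  y2: number;
}

// detected: the dimensions the face coordinates were recorded against (face.imageWidth/imageHeight)
// input: the dimensions of the image being cropped (oriented original, or video preview)
function cropFromScaledBox(detected: Dimensions, input: Dimensions, box: FaceBox) {
  // rescale the stored bounding box to the input image
  const widthScale = input.width / detected.width;
  const heightScale = input.height / detected.height;

  const halfWidth = (widthScale * (box.x2 - box.x1)) / 2;
  const halfHeight = (heightScale * (box.y2 - box.y1)) / 2;

  const middleX = Math.round(widthScale * box.x1 + halfWidth);
  const middleY = Math.round(heightScale * box.y1 + halfHeight);

  // zoom out 10%, then shrink the square until it fits inside the image bounds
  const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
  const newHalfSize = Math.min(
    middleX - Math.max(0, middleX - targetHalfSize),
    middleY - Math.max(0, middleY - targetHalfSize),
    Math.min(input.width - 1, middleX + targetHalfSize) - middleX,
    Math.min(input.height - 1, middleY + targetHalfSize) - middleY,
  );

  return {
    left: middleX - newHalfSize,
    top: middleY - newHalfSize,
    width: newHalfSize * 2,
    height: newHalfSize * 2,
  };
}

In the new test, input corresponds to the mocked preview dimensions (2560x1440) and detected to whatever dimensions are stored on the face fixture; because the square is clamped on all four sides, the resulting crop never reaches outside the preview.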