Source Code added

This commit is contained in:
Fr4nz D13trich 2026-02-02 15:06:40 +01:00
parent 800376eafd
commit 9efa9bc6dd
3912 changed files with 754770 additions and 2 deletions

152
server/test/fixtures/album.stub.ts vendored Normal file
View file

@@ -0,0 +1,152 @@
import { AlbumUserRole, AssetOrder } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { userStub } from 'test/fixtures/user.stub';
/**
 * Frozen album fixtures for server unit tests.
 *
 * `createdAt`/`updatedAt` are evaluated at import time (`new Date()`), so
 * their values differ between test runs — avoid asserting on them exactly.
 */
export const albumStub = {
  // Admin-owned album with no assets, no shares, no thumbnail.
  empty: Object.freeze({
    id: 'album-1',
    albumName: 'Empty album',
    description: '',
    ownerId: authStub.admin.user.id,
    owner: userStub.admin,
    assets: [],
    albumThumbnailAsset: null,
    albumThumbnailAssetId: null,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
  // Admin-owned album shared with user1 as an editor.
  sharedWithUser: Object.freeze({
    id: 'album-2',
    albumName: 'Empty album shared with user',
    description: '',
    ownerId: authStub.admin.user.id,
    owner: userStub.admin,
    assets: [],
    albumThumbnailAsset: null,
    albumThumbnailAssetId: null,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [
      {
        user: userStub.user1,
        role: AlbumUserRole.Editor,
      },
    ],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
  // Admin-owned album shared with both user1 and user2 as editors.
  sharedWithMultiple: Object.freeze({
    id: 'album-3',
    albumName: 'Empty album shared with users',
    description: '',
    ownerId: authStub.admin.user.id,
    owner: userStub.admin,
    assets: [],
    albumThumbnailAsset: null,
    albumThumbnailAssetId: null,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [
      {
        user: userStub.user1,
        role: AlbumUserRole.Editor,
      },
      {
        user: userStub.user2,
        role: AlbumUserRole.Editor,
      },
    ],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
  // user1-owned album shared with the admin as an editor.
  // NOTE(review): reuses id 'album-3' from sharedWithMultiple above — confirm
  // the duplicate is intentional before any test relies on ids being unique.
  sharedWithAdmin: Object.freeze({
    id: 'album-3',
    albumName: 'Empty album shared with admin',
    description: '',
    ownerId: authStub.user1.user.id,
    owner: userStub.user1,
    assets: [],
    albumThumbnailAsset: null,
    albumThumbnailAssetId: null,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [
      {
        user: userStub.admin,
        role: AlbumUserRole.Editor,
      },
    ],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
  // Single-asset album; thumbnail is intentionally left unset.
  oneAsset: Object.freeze({
    id: 'album-4',
    albumName: 'Album with one asset',
    description: '',
    ownerId: authStub.admin.user.id,
    owner: userStub.admin,
    assets: [assetStub.image],
    albumThumbnailAsset: null,
    albumThumbnailAssetId: null,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
  // Two assets, with the first asset doubling as the album thumbnail.
  twoAssets: Object.freeze({
    id: 'album-4a',
    albumName: 'Album with two assets',
    description: '',
    ownerId: authStub.admin.user.id,
    owner: userStub.admin,
    assets: [assetStub.image, assetStub.withLocation],
    albumThumbnailAsset: assetStub.image,
    albumThumbnailAssetId: assetStub.image.id,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
  // No assets but a thumbnail is set — exercises stale-thumbnail handling.
  emptyWithValidThumbnail: Object.freeze({
    id: 'album-5',
    albumName: 'Empty album with valid thumbnail',
    description: '',
    ownerId: authStub.admin.user.id,
    owner: userStub.admin,
    assets: [],
    albumThumbnailAsset: assetStub.image,
    albumThumbnailAssetId: assetStub.image.id,
    createdAt: new Date(),
    updatedAt: new Date(),
    deletedAt: null,
    sharedLinks: [],
    albumUsers: [],
    isActivityEnabled: true,
    order: AssetOrder.Desc,
    updateId: '42',
  }),
};

1133
server/test/fixtures/asset.stub.ts vendored Normal file

File diff suppressed because it is too large Load diff

56
server/test/fixtures/auth.stub.ts vendored Normal file
View file

@@ -0,0 +1,56 @@
import { AuthSession } from 'src/database';
import { AuthDto } from 'src/dtos/auth.dto';
// Bare user payloads shared by the frozen auth fixtures below; kept private
// so tests always consume them through `authStub`.
const authUser = {
  admin: {
    id: 'admin_id',
    name: 'admin',
    email: 'admin@test.com',
    isAdmin: true,
    quotaSizeInBytes: null, // null = unlimited quota
    quotaUsageInBytes: 0,
  },
  user1: {
    id: 'user-id',
    name: 'User 1',
    email: 'immich@test.com',
    isAdmin: false,
    quotaSizeInBytes: null,
    quotaUsageInBytes: 0,
  },
};
/**
 * Frozen AuthDto fixtures.
 *
 * `admin` carries no session; `user1`/`user2` carry a minimal session stub,
 * and `adminSharedLink` models authentication via a shared-link key.
 */
export const authStub = {
  admin: Object.freeze<AuthDto>({ user: authUser.admin }),
  user1: Object.freeze<AuthDto>({
    user: authUser.user1,
    // Only the id is needed by the code under test; the rest of AuthSession
    // is intentionally omitted via the cast.
    session: {
      id: 'token-id',
    } as AuthSession,
  }),
  user2: Object.freeze<AuthDto>({
    user: {
      id: 'user-2',
      name: 'User 2',
      email: 'user2@immich.cloud',
      isAdmin: false,
      quotaSizeInBytes: null,
      quotaUsageInBytes: 0,
    },
    session: {
      id: 'token-id',
    } as AuthSession,
  }),
  // Auth context for requests made through a fully-permissive shared link.
  adminSharedLink: Object.freeze({
    user: authUser.admin,
    sharedLink: {
      id: '123',
      showExif: true,
      allowDownload: true,
      allowUpload: true,
      expiresAt: null, // never expires
      password: null, // no password protection
      userId: '42',
    },
  }),
};

160
server/test/fixtures/face.stub.ts vendored Normal file
View file

@@ -0,0 +1,160 @@
import { SourceType } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { personStub } from 'test/fixtures/person.stub';
/**
 * Frozen asset-face fixtures.
 *
 * All faces reference `assetStub.image`; bounding boxes are normalized to the
 * unit square except the EXIF variants, which use pixel coordinates.
 * NOTE(review): `noPerson2`, `fromExif1` and `fromExif2` all reuse the id
 * 'assetFaceId9' — confirm the duplication is intentional.
 */
export const faceStub = {
  // Soft-deleted ML face (deletedAt set) attached to a named person.
  face1: Object.freeze({
    id: 'assetFaceId1',
    assetId: assetStub.image.id,
    asset: {
      ...assetStub.image,
      libraryId: null,
      updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
      stackId: null,
    },
    personId: personStub.withName.id,
    person: personStub.withName,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MachineLearning,
    faceSearch: { faceId: 'assetFaceId1', embedding: '[1, 2, 3, 4]' },
    deletedAt: new Date(),
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  // Face belonging to the "primary" person in merge scenarios.
  primaryFace1: Object.freeze({
    id: 'assetFaceId2',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.primaryPerson.id,
    person: personStub.primaryPerson,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MachineLearning,
    faceSearch: { faceId: 'assetFaceId2', embedding: '[1, 2, 3, 4]' },
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  // Face belonging to the person being merged away.
  mergeFace1: Object.freeze({
    id: 'assetFaceId3',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.mergePerson.id,
    person: personStub.mergePerson,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MachineLearning,
    faceSearch: { faceId: 'assetFaceId3', embedding: '[1, 2, 3, 4]' },
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  // Detected face not yet assigned to any person.
  noPerson1: Object.freeze({
    id: 'assetFaceId8',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: null,
    person: null,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MachineLearning,
    faceSearch: { faceId: 'assetFaceId8', embedding: '[1, 2, 3, 4]' },
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  noPerson2: Object.freeze({
    id: 'assetFaceId9',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: null,
    person: null,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MachineLearning,
    faceSearch: { faceId: 'assetFaceId9', embedding: '[1, 2, 3, 4]' },
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  // EXIF-sourced face with pixel-space bounding box; no embedding row.
  fromExif1: Object.freeze({
    id: 'assetFaceId9',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.randomPerson.id,
    person: personStub.randomPerson,
    boundingBoxX1: 100,
    boundingBoxY1: 100,
    boundingBoxX2: 200,
    boundingBoxY2: 200,
    imageHeight: 500,
    imageWidth: 400,
    sourceType: SourceType.Exif,
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  fromExif2: Object.freeze({
    id: 'assetFaceId9',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.randomPerson.id,
    person: personStub.randomPerson,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.Exif,
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
  // Face linked to a person that has a birth date (age-related tests).
  withBirthDate: Object.freeze({
    id: 'assetFaceId10',
    assetId: assetStub.image.id,
    asset: assetStub.image,
    personId: personStub.withBirthDate.id,
    person: personStub.withBirthDate,
    boundingBoxX1: 0,
    boundingBoxY1: 0,
    boundingBoxX2: 1,
    boundingBoxY2: 1,
    imageHeight: 1024,
    imageWidth: 1024,
    sourceType: SourceType.MachineLearning,
    deletedAt: null,
    updatedAt: new Date('2023-01-01T00:00:00Z'),
    updateId: '0d1173e3-4d80-4d76-b41e-57d56de21125',
    isVisible: true,
  }),
};

31
server/test/fixtures/file.stub.ts vendored Normal file
View file

@@ -0,0 +1,31 @@
/**
 * Frozen upload-file fixtures (shape of a received multipart upload).
 * Checksums are raw UTF-8 buffers of placeholder strings, not real hashes.
 */
export const fileStub = {
  // Still-image half of a live photo pair.
  livePhotoStill: Object.freeze({
    uuid: 'random-uuid',
    originalPath: 'fake_path/asset_1.jpeg',
    checksum: Buffer.from('file hash', 'utf8'),
    originalName: 'asset_1.jpeg',
    size: 42,
  }),
  // Motion-video half of a live photo pair.
  livePhotoMotion: Object.freeze({
    uuid: 'live-photo-motion-asset',
    originalPath: 'fake_path/asset_1.mp4',
    checksum: Buffer.from('live photo file hash', 'utf8'),
    originalName: 'asset_1.mp4',
    size: 69,
  }),
  // Plain photo upload; the only stub that carries an explicit mimeType.
  photo: Object.freeze({
    uuid: 'photo',
    originalPath: 'fake_path/photo1.jpeg',
    mimeType: 'image/jpeg',
    checksum: Buffer.from('photo file hash', 'utf8'),
    originalName: 'photo1.jpeg',
    size: 24,
  }),
  // XMP sidecar accompanying `photo`.
  photoSidecar: Object.freeze({
    uuid: 'photo-sidecar',
    originalPath: 'fake_path/photo1.jpeg.xmp',
    originalName: 'photo1.jpeg.xmp',
    checksum: Buffer.from('photo-sidecar file hash', 'utf8'),
    size: 96,
  }),
};

275
server/test/fixtures/media.stub.ts vendored Normal file
View file

@@ -0,0 +1,275 @@
import { AudioStreamInfo, VideoFormat, VideoInfo, VideoStreamInfo } from 'src/types';
// Baseline ffprobe-style result: a 1080p HEVC MP4 with a single mp3 audio
// stream. The probeStub variants below spread this and override one aspect.
const probeStubDefaultFormat: VideoFormat = {
  formatName: 'mov,mp4,m4a,3gp,3g2,mj2',
  formatLongName: 'QuickTime / MOV',
  duration: 0,
  bitrate: 0,
};
const probeStubDefaultVideoStream: VideoStreamInfo[] = [
  {
    index: 0,
    height: 1080,
    width: 1920,
    codecName: 'hevc',
    frameCount: 100,
    rotation: 0,
    isHDR: false,
    bitrate: 0,
    pixelFormat: 'yuv420p',
  },
];
const probeStubDefaultAudioStream: AudioStreamInfo[] = [{ index: 3, codecName: 'mp3', bitrate: 100 }];
const probeStubDefault: VideoInfo = {
  format: probeStubDefaultFormat,
  videoStreams: probeStubDefaultVideoStream,
  audioStreams: probeStubDefaultAudioStream,
};
/**
 * Frozen ffprobe result fixtures for transcoding tests. Each entry tweaks
 * exactly one property of `probeStubDefault` (stream selection, resolution,
 * bit depth, HDR, rotation, container, codec, bitrate, or color metadata).
 */
export const probeStub = {
  noVideoStreams: Object.freeze<VideoInfo>({ ...probeStubDefault, videoStreams: [] }),
  noAudioStreams: Object.freeze<VideoInfo>({ ...probeStubDefault, audioStreams: [] }),
  // Three candidate streams; stream-selection logic should pick by index/bitrate.
  // 'h7000' is presumably a deliberately-unrecognized codec name — verify.
  multipleVideoStreams: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 1080,
        width: 400,
        codecName: 'hevc',
        frameCount: 1,
        rotation: 0,
        isHDR: false,
        bitrate: 100,
        pixelFormat: 'yuv420p',
      },
      {
        index: 1,
        height: 1080,
        width: 400,
        codecName: 'hevc',
        frameCount: 2,
        rotation: 0,
        isHDR: false,
        bitrate: 101,
        pixelFormat: 'yuv420p',
      },
      {
        index: 2,
        height: 1080,
        width: 400,
        codecName: 'h7000',
        frameCount: 3,
        rotation: 0,
        isHDR: false,
        bitrate: 99,
        pixelFormat: 'yuv420p',
      },
    ],
  }),
  multipleAudioStreams: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    audioStreams: [
      { index: 0, codecName: 'mp3', bitrate: 100 },
      { index: 1, codecName: 'mp3', bitrate: 101 },
      { index: 2, codecName: 'mp3', bitrate: 102 },
    ],
  }),
  // Degenerate stream with height 0 — exercises scaling guard paths.
  noHeight: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 0,
        width: 400,
        codecName: 'hevc',
        frameCount: 100,
        rotation: 0,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p',
      },
    ],
  }),
  videoStream2160p: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 2160,
        width: 3840,
        codecName: 'h264',
        frameCount: 100,
        rotation: 0,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p',
      },
    ],
  }),
  videoStream40Mbps: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [{ ...probeStubDefaultVideoStream[0], bitrate: 40_000_000 }],
  }),
  videoStreamMTS: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    format: {
      ...probeStubDefaultFormat,
      formatName: 'mpegts',
    },
  }),
  videoStreamHDR: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 480,
        width: 480,
        codecName: 'h264',
        frameCount: 100,
        rotation: 0,
        isHDR: true,
        bitrate: 0,
        pixelFormat: 'yuv420p10le',
      },
    ],
  }),
  // 10-bit pixel format but explicitly not flagged HDR.
  videoStream10Bit: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 480,
        width: 480,
        codecName: 'h264',
        frameCount: 100,
        rotation: 0,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p10le',
      },
    ],
  }),
  videoStream4K10Bit: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 2160,
        width: 3840,
        codecName: 'h264',
        frameCount: 100,
        rotation: 0,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p10le',
      },
    ],
  }),
  // Landscape 3840x2160 buffer whose rotation tag of 90° makes it effectively
  // vertical once the rotation is applied.
  videoStreamVertical2160p: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 2160,
        width: 3840,
        codecName: 'h264',
        frameCount: 100,
        rotation: 90,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p',
      },
    ],
  }),
  // Odd dimensions — encoders generally require even width/height.
  videoStreamOddHeight: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 355,
        width: 1586,
        codecName: 'h264',
        frameCount: 100,
        rotation: 0,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p',
      },
    ],
  }),
  videoStreamOddWidth: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        index: 0,
        height: 1586,
        width: 355,
        codecName: 'h264',
        frameCount: 100,
        rotation: 0,
        isHDR: false,
        bitrate: 0,
        pixelFormat: 'yuv420p',
      },
    ],
  }),
  audioStreamAac: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    audioStreams: [{ index: 1, codecName: 'aac', bitrate: 100 }],
  }),
  audioStreamUnknown: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    audioStreams: [
      { index: 0, codecName: 'aac', bitrate: 100 },
      { index: 1, codecName: 'unknown', bitrate: 200 },
    ],
  }),
  matroskaContainer: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    format: {
      formatName: 'matroska,webm',
      formatLongName: 'Matroska / WebM',
      duration: 0,
      bitrate: 0,
    },
  }),
  videoStreamVp9: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [{ ...probeStubDefaultVideoStream[0], codecName: 'vp9' }],
    format: {
      formatName: 'matroska,webm',
      formatLongName: 'Matroska / WebM',
      duration: 0,
      bitrate: 0,
    },
  }),
  videoStreamH264: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [{ ...probeStubDefaultVideoStream[0], codecName: 'h264' }],
  }),
  videoStreamAvi: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [{ ...probeStubDefaultVideoStream[0], codecName: 'h264' }],
    format: {
      formatName: 'avi',
      formatLongName: 'AVI (Audio Video Interleaved)',
      duration: 0,
      bitrate: 0,
    },
  }),
  // Stream whose color metadata is all 'reserved' — should be treated as unset.
  videoStreamReserved: Object.freeze<VideoInfo>({
    ...probeStubDefault,
    videoStreams: [
      {
        ...probeStubDefaultVideoStream[0],
        colorPrimaries: 'reserved',
        colorSpace: 'reserved',
        colorTransfer: 'reserved',
      },
    ],
  }),
};

View file

@@ -0,0 +1,14 @@
import { NotificationLevel, NotificationType } from 'src/enum';
/**
 * Notification fixture for album-invite events.
 * NOTE(review): unlike sibling fixture files this object is not wrapped in
 * `Object.freeze`, so tests can mutate it — confirm that is intentional.
 */
export const notificationStub = {
  albumEvent: {
    id: 'notification-album-event',
    type: NotificationType.AlbumInvite,
    description: 'You have been invited to a shared album',
    title: 'Album Invitation',
    createdAt: new Date('2024-01-01'),
    data: { albumId: 'album-id' }, // payload consumed by the client deep link
    level: NotificationLevel.Success,
    readAt: null, // unread
  },
};

262
server/test/fixtures/person.stub.ts vendored Normal file
View file

@@ -0,0 +1,262 @@
import { AssetType } from 'src/enum';
import { previewFile } from 'test/fixtures/asset.stub';
import { userStub } from 'test/fixtures/user.stub';
// Shared updateId used by every person stub below.
const updateId = '0d1173e3-4d80-4d76-b41e-57d56de21125';
/**
 * Frozen person fixtures. Most entries deliberately reuse id 'person-1' —
 * they are variants of the same person differing in one attribute
 * (name, birth date, thumbnail, hidden flag, …).
 */
export const personStub = {
  noName: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: '',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail.jpg',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  hidden: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: '',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail.jpg',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: true,
    isFavorite: false,
    color: 'red',
  }),
  withName: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: 'Person 1',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail.jpg',
    faces: [],
    faceAssetId: 'assetFaceId',
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  withBirthDate: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: 'Person 1',
    birthDate: new Date('1976-06-30'),
    thumbnailPath: '/path/to/thumbnail.jpg',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  // Person with no generated thumbnail yet (empty path).
  noThumbnail: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: '',
    birthDate: null,
    thumbnailPath: '',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  // Person whose thumbnail was regenerated at a new path.
  newThumbnail: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: '',
    birthDate: null,
    thumbnailPath: '/new/path/to/thumbnail.jpg',
    faces: [],
    faceAssetId: 'asset-id',
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  // Target person kept when merging (see mergePerson below).
  primaryPerson: Object.freeze({
    id: 'person-1',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: 'Person 1',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  // Person merged into primaryPerson.
  mergePerson: Object.freeze({
    id: 'person-2',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: 'Person 2',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  randomPerson: Object.freeze({
    id: 'person-3',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: '',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail',
    faces: [],
    faceAssetId: null,
    faceAsset: null,
    isHidden: false,
    isFavorite: false,
    color: 'red',
  }),
  isFavorite: Object.freeze({
    id: 'person-4',
    createdAt: new Date('2021-01-01'),
    updatedAt: new Date('2021-01-01'),
    updateId,
    ownerId: userStub.admin.id,
    name: 'Person 1',
    birthDate: null,
    thumbnailPath: '/path/to/thumbnail.jpg',
    faces: [],
    faceAssetId: 'assetFaceId',
    faceAsset: null,
    isHidden: false,
    isFavorite: true,
    color: 'red',
  }),
};
/**
 * Inputs for person-thumbnail generation: a face crop box (x1/y1–x2/y2) in
 * the coordinate space of the original image (oldWidth × oldHeight), plus the
 * source asset's path/orientation and the preview to crop from.
 */
export const personThumbnailStub = {
  // Crop near the top-left corner of a large portrait image.
  newThumbnailStart: Object.freeze({
    ownerId: userStub.admin.id,
    x1: 5,
    y1: 5,
    x2: 505,
    y2: 505,
    oldHeight: 2880,
    oldWidth: 2160,
    type: AssetType.Image,
    originalPath: '/original/path.jpg',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
  // Crop well inside the image bounds.
  newThumbnailMiddle: Object.freeze({
    ownerId: userStub.admin.id,
    x1: 100,
    y1: 100,
    x2: 200,
    y2: 200,
    oldHeight: 500,
    oldWidth: 400,
    type: AssetType.Image,
    originalPath: '/original/path.jpg',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
  // Crop touching the bottom-right edge.
  newThumbnailEnd: Object.freeze({
    ownerId: userStub.admin.id,
    x1: 300,
    y1: 300,
    x2: 495,
    y2: 495,
    oldHeight: 500,
    oldWidth: 500,
    type: AssetType.Image,
    originalPath: '/original/path.jpg',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
  // RAW (.dng) source — exercises embedded-preview handling.
  rawEmbeddedThumbnail: Object.freeze({
    ownerId: userStub.admin.id,
    x1: 100,
    y1: 100,
    x2: 200,
    y2: 200,
    oldHeight: 500,
    oldWidth: 400,
    type: AssetType.Image,
    originalPath: '/original/path.dng',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
  // Box extends past the top-left edge; must be clamped to >= 0.
  negativeCoordinate: Object.freeze({
    ownerId: userStub.admin.id,
    x1: -176,
    y1: -230,
    x2: 193,
    y2: 251,
    oldHeight: 1440,
    oldWidth: 2162,
    type: AssetType.Image,
    originalPath: '/original/path.jpg',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
  // Box extends past the right edge (x2 > oldWidth); must be clamped.
  overflowingCoordinate: Object.freeze({
    ownerId: userStub.admin.id,
    x1: 2097,
    y1: 0,
    x2: 2171,
    y2: 152,
    oldHeight: 1440,
    oldWidth: 2162,
    type: AssetType.Image,
    originalPath: '/original/path.jpg',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
  // Video source — thumbnail is cropped from the extracted preview frame.
  videoThumbnail: Object.freeze({
    ownerId: userStub.admin.id,
    x1: 100,
    y1: 100,
    x2: 200,
    y2: 200,
    oldHeight: 500,
    oldWidth: 400,
    type: AssetType.Video,
    originalPath: '/original/path.mp4',
    exifOrientation: '1',
    previewPath: previewFile.path,
  }),
};

221
server/test/fixtures/shared-link.stub.ts vendored Normal file
View file

@@ -0,0 +1,221 @@
import { UserAdmin } from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { SharedLinkResponseDto } from 'src/dtos/shared-link.dto';
import { AssetStatus, AssetType, AssetVisibility, SharedLinkType } from 'src/enum';
import { assetStub } from 'test/fixtures/asset.stub';
import { authStub } from 'test/fixtures/auth.stub';
import { userStub } from 'test/fixtures/user.stub';
// Relative dates computed once at module load: `tomorrow` makes a link still
// valid, `yesterday` makes it expired.
const today = new Date();
const tomorrow = new Date();
const yesterday = new Date();
tomorrow.setDate(today.getDate() + 1);
yesterday.setDate(yesterday.getDate() - 1);
// 50-byte shared-link key decoded from hex; the response DTOs expose it
// re-encoded as base64url.
const sharedLinkBytes = Buffer.from(
  '2c2b646895f84753bff43fb696ad124f3b0faf2a0bd547406f26fa4a76b5c71990092baa536275654b2ab7a191fb21a6d6cd',
  'hex',
);
/**
 * Frozen shared-link fixtures. All entries reuse id '123' and the same key;
 * they vary by type (Individual vs Album), expiry, permissions, and password.
 */
export const sharedLinkStub = {
  // Individual-asset link, valid until tomorrow, password-protected.
  individual: Object.freeze({
    id: '123',
    userId: authStub.admin.user.id,
    key: sharedLinkBytes,
    type: SharedLinkType.Individual,
    createdAt: today,
    expiresAt: tomorrow,
    allowUpload: true,
    allowDownload: true,
    showExif: true,
    albumId: null,
    album: null,
    description: null,
    assets: [assetStub.image],
    password: 'password',
    slug: null,
  }),
  // Album link that is still valid (expires tomorrow).
  valid: Object.freeze({
    id: '123',
    userId: authStub.admin.user.id,
    user: userStub.admin,
    key: sharedLinkBytes,
    type: SharedLinkType.Album,
    createdAt: today,
    expiresAt: tomorrow,
    allowUpload: true,
    allowDownload: true,
    showExif: true,
    albumId: null,
    description: null,
    password: null,
    assets: [] as MapAsset[],
    album: null,
    slug: null,
  }),
  // Album link that expired yesterday.
  expired: Object.freeze({
    id: '123',
    userId: authStub.admin.user.id,
    user: userStub.admin,
    key: sharedLinkBytes,
    type: SharedLinkType.Album,
    createdAt: today,
    expiresAt: yesterday,
    allowUpload: true,
    allowDownload: true,
    showExif: true,
    description: null,
    password: null,
    albumId: null,
    assets: [] as MapAsset[],
    album: null,
    slug: null,
  }),
  // Read-only link with EXIF hidden; carries one fully inlined asset so tests
  // can check which metadata fields get stripped from responses.
  readonlyNoExif: Object.freeze({
    id: '123',
    userId: authStub.admin.user.id,
    key: sharedLinkBytes,
    type: SharedLinkType.Individual,
    createdAt: today,
    expiresAt: tomorrow,
    allowUpload: false,
    allowDownload: false,
    showExif: false,
    description: null,
    password: null,
    assets: [
      {
        id: 'id_1',
        status: AssetStatus.Active,
        owner: undefined as unknown as UserAdmin, // owner intentionally absent
        ownerId: 'user_id_1',
        deviceAssetId: 'device_asset_id_1',
        deviceId: 'device_id_1',
        type: AssetType.Video,
        originalPath: 'fake_path/jpeg',
        checksum: Buffer.from('file hash', 'utf8'),
        fileModifiedAt: today,
        fileCreatedAt: today,
        localDateTime: today,
        createdAt: today,
        updatedAt: today,
        isFavorite: false,
        isArchived: false,
        isExternal: false,
        isOffline: false,
        files: [],
        thumbhash: null,
        encodedVideoPath: '',
        duration: null,
        livePhotoVideo: null,
        livePhotoVideoId: null,
        originalFileName: 'asset_1.jpeg',
        // Fully populated EXIF record — everything here should be withheld
        // when showExif is false.
        exifInfo: {
          projectionType: null,
          livePhotoCID: null,
          assetId: 'id_1',
          description: 'description',
          exifImageWidth: 500,
          exifImageHeight: 500,
          fileSizeInByte: 100,
          orientation: 'orientation',
          dateTimeOriginal: today,
          modifyDate: today,
          timeZone: 'America/Los_Angeles',
          latitude: 100,
          longitude: 100,
          city: 'city',
          state: 'state',
          country: 'country',
          make: 'camera-make',
          model: 'camera-model',
          lensModel: 'fancy',
          fNumber: 100,
          focalLength: 100,
          iso: 100,
          exposureTime: '1/16',
          fps: 100,
          profileDescription: 'sRGB',
          bitsPerSample: 8,
          colorspace: 'sRGB',
          autoStackId: null,
          rating: 3,
          updatedAt: today,
          updateId: '42',
          libraryId: null,
          stackId: null,
          visibility: AssetVisibility.Timeline,
          width: 500,
          height: 500,
          tags: [],
        },
        sharedLinks: [],
        faces: [],
        sidecarPath: null,
        deletedAt: null,
        duplicateId: null,
        updateId: '42',
        libraryId: null,
        stackId: null,
        visibility: AssetVisibility.Timeline,
        width: 500,
        height: 500,
        isEdited: false,
      },
    ],
    albumId: null,
    album: null,
    slug: null,
  }),
  // Valid album link that requires a password.
  passwordRequired: Object.freeze({
    id: '123',
    userId: authStub.admin.user.id,
    key: sharedLinkBytes,
    type: SharedLinkType.Album,
    createdAt: today,
    expiresAt: tomorrow,
    allowUpload: true,
    allowDownload: true,
    showExif: true,
    slug: null,
    description: null,
    password: 'password',
    assets: [],
    albumId: null,
    album: null,
  }),
};
/**
 * Expected API response shapes for the shared-link fixtures above.
 * Note the key is the same bytes re-encoded as base64url, and the entity's
 * `showExif` maps to `showMetadata` on the DTO.
 */
export const sharedLinkResponseStub = {
  valid: Object.freeze<SharedLinkResponseDto>({
    allowDownload: true,
    allowUpload: true,
    assets: [],
    createdAt: today,
    description: null,
    password: null,
    expiresAt: tomorrow,
    id: '123',
    key: sharedLinkBytes.toString('base64url'),
    showMetadata: true,
    type: SharedLinkType.Album,
    userId: 'admin_id',
    slug: null,
  }),
  expired: Object.freeze<SharedLinkResponseDto>({
    album: undefined,
    allowDownload: true,
    allowUpload: true,
    assets: [],
    createdAt: today,
    description: null,
    password: null,
    expiresAt: yesterday,
    id: '123',
    key: sharedLinkBytes.toString('base64url'),
    showMetadata: true,
    type: SharedLinkType.Album,
    userId: 'admin_id',
    slug: null,
  }),
};

View file

@@ -0,0 +1,120 @@
import { SystemConfig } from 'src/config';
import { DeepPartial } from 'src/types';
/**
 * Partial SystemConfig fixtures, one per feature scenario. The `satisfies`
 * clause type-checks every entry against DeepPartial<SystemConfig> while
 * preserving each entry's literal shape for the tests.
 */
export const systemConfigStub = {
  enabled: {
    oauth: {
      enabled: true,
      autoRegister: true,
      autoLaunch: false,
      buttonText: 'OAuth',
    },
  },
  // Password login switched off (forces OAuth-only flows).
  disabled: {
    passwordLogin: {
      enabled: false,
    },
  },
  oauthEnabled: {
    oauth: {
      enabled: true,
      autoRegister: false,
      autoLaunch: false,
      buttonText: 'OAuth',
    },
  },
  oauthWithAutoRegister: {
    oauth: {
      enabled: true,
      autoRegister: true,
      autoLaunch: false,
      buttonText: 'OAuth',
    },
  },
  // OAuth redirect overridden for the mobile app.
  oauthWithMobileOverride: {
    oauth: {
      enabled: true,
      autoRegister: true,
      mobileOverrideEnabled: true,
      mobileRedirectUri: 'http://mobile-redirect',
      buttonText: 'OAuth',
    },
  },
  oauthWithStorageQuota: {
    oauth: {
      enabled: true,
      autoRegister: true,
      defaultStorageQuota: 1,
    },
  },
  // Filesystem watching on, scheduled scans off.
  libraryWatchEnabled: {
    library: {
      scan: {
        enabled: false,
      },
      watch: {
        enabled: true,
      },
    },
  },
  libraryWatchDisabled: {
    library: {
      scan: {
        enabled: false,
      },
      watch: {
        enabled: false,
      },
    },
  },
  // Nightly scheduled scan, no watching.
  libraryScan: {
    library: {
      scan: {
        enabled: true,
        cronExpression: '0 0 * * *',
      },
      watch: {
        enabled: false,
      },
    },
  },
  libraryScanAndWatch: {
    library: {
      scan: {
        enabled: true,
        cronExpression: '0 0 * * *',
      },
      watch: {
        enabled: true,
      },
    },
  },
  backupEnabled: {
    backup: {
      database: {
        enabled: true,
        cronExpression: '0 0 * * *',
        keepLastAmount: 1,
      },
    },
  },
  machineLearningDisabled: {
    machineLearning: {
      enabled: false,
    },
  },
  machineLearningEnabled: {
    machineLearning: {
      enabled: true,
      clip: {
        modelName: 'ViT-B-16__openai',
        enabled: true,
      },
    },
  },
  publicUsersDisabled: {
    server: {
      publicUsers: false,
    },
  },
} satisfies Record<string, DeepPartial<SystemConfig>>;

71
server/test/fixtures/tag.stub.ts vendored Normal file
View file

@@ -0,0 +1,71 @@
import { Tag } from 'src/database';
import { TagResponseDto } from 'src/dtos/tag.dto';
import { newUuidV7 } from 'test/small.factory';
// A two-level tag hierarchy: 'Parent' and its child 'Parent/Child'.
const parent = Object.freeze<Tag>({
  id: 'tag-parent',
  createdAt: new Date('2021-01-01T00:00:00Z'),
  updatedAt: new Date('2021-01-01T00:00:00Z'),
  value: 'Parent',
  color: null,
  parentId: null,
});
const child = Object.freeze<Tag>({
  id: 'tag-child',
  createdAt: new Date('2021-01-01T00:00:00Z'),
  updatedAt: new Date('2021-01-01T00:00:00Z'),
  value: 'Parent/Child',
  color: null,
  parentId: parent.id,
});
// `tag` and `color` deliberately share id 'tag-1' — same tag with and
// without a color assigned.
const tag = {
  id: 'tag-1',
  createdAt: new Date('2021-01-01T00:00:00Z'),
  updatedAt: new Date('2021-01-01T00:00:00Z'),
  value: 'Tag1',
  color: null,
  parentId: null,
};
const color = {
  id: 'tag-1',
  createdAt: new Date('2021-01-01T00:00:00Z'),
  updatedAt: new Date('2021-01-01T00:00:00Z'),
  value: 'Tag1',
  color: '#000000',
  parentId: null,
};
// Extra columns for upsert-shaped rows. `newUuidV7()` runs once at module
// load, so every *Create/*Upsert stub below shares the same updateId.
const upsert = {
  userId: 'tag-user',
  updateId: newUuidV7(),
};
export const tagStub = {
  tag,
  tagCreate: { ...tag, ...upsert },
  color,
  colorCreate: { ...color, ...upsert },
  parentUpsert: { ...parent, ...upsert },
  childUpsert: { ...child, ...upsert },
};
/**
 * Expected TagResponseDto shapes for the tag fixtures above. `color1` is the
 * same tag with its color included in the response.
 */
export const tagResponseStub = {
  tag1: Object.freeze<TagResponseDto>({
    id: 'tag-1',
    createdAt: new Date('2021-01-01T00:00:00Z'),
    updatedAt: new Date('2021-01-01T00:00:00Z'),
    name: 'Tag1',
    value: 'Tag1',
  }),
  color1: Object.freeze<TagResponseDto>({
    id: 'tag-1',
    createdAt: new Date('2021-01-01T00:00:00Z'),
    updatedAt: new Date('2021-01-01T00:00:00Z'),
    color: '#000000',
    name: 'Tag1',
    value: 'Tag1',
  }),
};

58
server/test/fixtures/user.stub.ts vendored Normal file
View file

@@ -0,0 +1,58 @@
import { UserAdmin } from 'src/database';
import { UserStatus } from 'src/enum';
import { authStub } from 'test/fixtures/auth.stub';
/**
 * UserAdmin fixtures layered on top of the auth fixtures (each entry spreads
 * the corresponding `authStub` user, then fills in the admin-only columns).
 *
 * The legacy angle-bracket assertions (`<UserAdmin>{...}`) were replaced with
 * `as UserAdmin`: angle-bracket assertions are invalid in .tsx files and are
 * rejected by the `@typescript-eslint/consistent-type-assertions` default.
 * The change is type-level only — runtime values are identical.
 *
 * NOTE(review): unlike most sibling fixture files these objects are not
 * wrapped in `Object.freeze`; confirm mutability is intentional.
 */
export const userStub = {
  admin: {
    ...authStub.admin.user,
    status: UserStatus.Active,
    profileChangedAt: new Date('2021-01-01'),
    name: 'admin_name', // overrides the name spread from authStub
    id: 'admin_id',
    storageLabel: 'admin',
    oauthId: '',
    shouldChangePassword: false,
    avatarColor: null,
    profileImagePath: '',
    createdAt: new Date('2021-01-01'),
    deletedAt: null,
    updatedAt: new Date('2021-01-01'),
    metadata: [],
    quotaSizeInBytes: null,
    quotaUsageInBytes: 0,
  } as UserAdmin,
  user1: {
    ...authStub.user1.user,
    status: UserStatus.Active,
    profileChangedAt: new Date('2021-01-01'),
    name: 'immich_name',
    storageLabel: null,
    oauthId: '',
    shouldChangePassword: false,
    avatarColor: null,
    profileImagePath: '',
    createdAt: new Date('2021-01-01'),
    deletedAt: null,
    updatedAt: new Date('2021-01-01'),
    metadata: [],
    quotaSizeInBytes: null,
    quotaUsageInBytes: 0,
  } as UserAdmin,
  user2: {
    ...authStub.user2.user,
    status: UserStatus.Active,
    profileChangedAt: new Date('2021-01-01'),
    metadata: [],
    name: 'immich_name',
    storageLabel: null,
    oauthId: '',
    shouldChangePassword: false,
    avatarColor: null,
    profileImagePath: '',
    createdAt: new Date('2021-01-01'),
    deletedAt: null,
    updatedAt: new Date('2021-01-01'),
    quotaSizeInBytes: null,
    quotaUsageInBytes: 0,
  } as UserAdmin,
};

View file

@@ -0,0 +1,778 @@
/* eslint-disable @typescript-eslint/no-unsafe-function-type */
import { Insertable, Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { createHash, randomBytes } from 'node:crypto';
import { Stats } from 'node:fs';
import { Writable } from 'node:stream';
import { AssetFace } from 'src/database';
import { AuthDto, LoginResponseDto } from 'src/dtos/auth.dto';
import {
AlbumUserRole,
AssetType,
AssetVisibility,
MemoryType,
SourceType,
SyncEntityType,
SyncRequestType,
} from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { ActivityRepository } from 'src/repositories/activity.repository';
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MapRepository } from 'src/repositories/map.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { PluginRepository } from 'src/repositories/plugin.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SyncCheckpointRepository } from 'src/repositories/sync-checkpoint.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { TagRepository } from 'src/repositories/tag.repository';
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { WorkflowRepository } from 'src/repositories/workflow.repository';
import { DB } from 'src/schema';
import { AlbumTable } from 'src/schema/tables/album.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
import { AssetMetadataTable } from 'src/schema/tables/asset-metadata.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
import { MemoryTable } from 'src/schema/tables/memory.table';
import { PersonTable } from 'src/schema/tables/person.table';
import { SessionTable } from 'src/schema/tables/session.table';
import { StackTable } from 'src/schema/tables/stack.table';
import { TagAssetTable } from 'src/schema/tables/tag-asset.table';
import { TagTable } from 'src/schema/tables/tag.table';
import { UserTable } from 'src/schema/tables/user.table';
import { BASE_SERVICE_DEPENDENCIES, BaseService } from 'src/services/base.service';
import { MetadataService } from 'src/services/metadata.service';
import { SyncService } from 'src/services/sync.service';
import { UploadFile } from 'src/types';
import { mockEnvData } from 'test/repositories/config.repository.mock';
import { newTelemetryRepositoryMock } from 'test/repositories/telemetry.repository.mock';
import { factory, newDate, newEmbedding, newUuid } from 'test/small.factory';
import { automock, wait } from 'test/utils';
import { Mocked } from 'vitest';
// Any constructable class producing instances of T. T defaults to `any` so
// heterogeneous dependency lists (repositories of mixed types) can share one type.
interface ClassConstructor<T = any> extends Function {
  new (...args: any[]): T;
}
// Configuration for a medium test: which repository classes are automocked,
// which are real (backed by `database`) for the service under test.
type MediumTestOptions = {
  mock: ClassConstructor<any>[];
  real: ClassConstructor<any>[];
  database: Kysely<DB>;
};
/**
 * Convenience wrapper: builds a MediumTestContext for `Service` and returns
 * the service under test alongside its context.
 */
export const newMediumService = <S extends BaseService>(Service: ClassConstructor<S>, options: MediumTestOptions) => {
  const context = new MediumTestContext(Service, options);
  return { sut: context.sut, ctx: context };
};
/**
 * Harness for "medium" tests: constructs a real service instance whose
 * repository dependencies are a caller-chosen mix of real (database-backed)
 * and automocked implementations, plus helpers to seed database rows.
 */
export class MediumTestContext<S extends BaseService = BaseService> {
  // Cache of real repository instances keyed by constructor name so repeated
  // get() calls return the same instance.
  private repoCache: Record<string, any> = {};
  // Positional constructor arguments for the service; indices line up with
  // BASE_SERVICE_DEPENDENCIES (relied upon by getMock()).
  private sutDeps: any[];
  sut: S;
  database: Kysely<DB>;
  constructor(
    Service: ClassConstructor<S>,
    private options: MediumTestOptions,
  ) {
    this.sutDeps = this.makeDeps(options);
    this.sut = new Service(...this.sutDeps);
    this.database = options.database;
  }
  // Builds the constructor argument list: real repositories for options.real,
  // automocks for options.mock; anything else is left undefined.
  private makeDeps(options: MediumTestOptions) {
    const deps = BASE_SERVICE_DEPENDENCIES;
    // Validate up front that every requested class is a known dependency.
    for (const dep of options.mock) {
      if (!deps.includes(dep)) {
        throw new Error(`Mocked repository ${dep.name} is not a valid dependency`);
      }
    }
    for (const dep of options.real) {
      if (!deps.includes(dep)) {
        throw new Error(`Real repository ${dep.name} is not a valid dependency`);
      }
    }
    return (deps as ClassConstructor<any>[]).map((dep) => {
      if (options.real.includes(dep)) {
        return this.get(dep);
      }
      if (options.mock.includes(dep)) {
        return newMockRepository(dep);
      }
      // Dependencies neither real nor mocked stay undefined.
    });
  }
  // Returns (creating and caching on first use) a real repository instance.
  get<T>(key: ClassConstructor<T>): T {
    if (!this.repoCache[key.name]) {
      const real = newRealRepository(key, this.options.database);
      this.repoCache[key.name] = real;
    }
    return this.repoCache[key.name];
  }
  // Returns the automock that was injected into the service for `key`.
  // Throws if `key` was not declared in options.mock.
  getMock<T, R = Mocked<T>>(key: ClassConstructor<T>): R {
    const index = BASE_SERVICE_DEPENDENCIES.indexOf(key as any);
    if (index === -1 || !this.options.mock.includes(key)) {
      throw new Error(`getMock called with a key that is not a mock: ${key.name}`);
    }
    return this.sutDeps[index] as R;
  }
  // Inserts a user row with generated defaults; returns input and DB result.
  async newUser(dto: Partial<Insertable<UserTable>> = {}) {
    const user = mediumFactory.userInsert(dto);
    const result = await this.get(UserRepository).create(user);
    return { user, result };
  }
  // Inserts a partner relationship (timeline sharing defaults to enabled).
  async newPartner(dto: { sharedById: string; sharedWithId: string; inTimeline?: boolean }) {
    const partner = { inTimeline: true, ...dto };
    const result = await this.get(PartnerRepository).create(partner);
    return { partner, result };
  }
  // Inserts a stack over `assetIds`; the first asset becomes the primary.
  async newStack(dto: Omit<Insertable<StackTable>, 'primaryAssetId'>, assetIds: string[]) {
    const date = factory.date();
    const stack = {
      id: factory.uuid(),
      createdAt: date,
      updatedAt: date,
      ...dto,
    };
    const result = await this.get(StackRepository).create(stack, assetIds);
    return { stack: { ...stack, primaryAssetId: assetIds[0] }, result };
  }
  // Inserts an asset row with generated defaults.
  async newAsset(dto: Partial<Insertable<AssetTable>> = {}) {
    const asset = mediumFactory.assetInsert(dto);
    const result = await this.get(AssetRepository).create(asset);
    return { asset, result };
  }
  // Upserts one metadata key/value item for an asset.
  async newMetadata(dto: Insertable<AssetMetadataTable>) {
    const { assetId, ...item } = dto;
    const result = await this.get(AssetRepository).upsertMetadata(assetId, [item]);
    return { metadata: dto, result };
  }
  // Upserts an asset file record (thumbnail/preview path etc.).
  async newAssetFile(dto: Insertable<AssetFileTable>) {
    const result = await this.get(AssetRepository).upsertFile(dto);
    return { result };
  }
  // Inserts an asset face with generated bounding-box defaults.
  async newAssetFace(dto: Partial<Insertable<AssetFace>> & { assetId: string }) {
    const assetFace = mediumFactory.assetFaceInsert(dto);
    const result = await this.get(PersonRepository).createAssetFace(assetFace);
    return { assetFace, result };
  }
  // Inserts a memory with no attached assets.
  async newMemory(dto: Partial<Insertable<MemoryTable>> = {}) {
    const memory = mediumFactory.memoryInsert(dto);
    const result = await this.get(MemoryRepository).create(memory, new Set<string>());
    return { memory, result };
  }
  // Links a single asset to an existing memory.
  async newMemoryAsset(dto: { memoryId: string; assetId: string }) {
    const result = await this.get(MemoryRepository).addAssetIds(dto.memoryId, [dto.assetId]);
    return { memoryAsset: dto, result };
  }
  // Upserts exif data, overriding any locked properties.
  async newExif(dto: Insertable<AssetExifTable>) {
    const result = await this.get(AssetRepository).upsertExif(dto, { lockedPropertiesBehavior: 'override' });
    return { result };
  }
  // Inserts an album with no assets and no shared users.
  async newAlbum(dto: Insertable<AlbumTable>) {
    const album = mediumFactory.albumInsert(dto);
    const result = await this.get(AlbumRepository).create(album, [], []);
    return { album, result };
  }
  // Links a single asset to an existing album.
  async newAlbumAsset(albumAsset: { albumId: string; assetId: string }) {
    const result = await this.get(AlbumRepository).addAssetIds(albumAsset.albumId, [albumAsset.assetId]);
    return { albumAsset, result };
  }
  // Shares an album with a user (editor role by default).
  async newAlbumUser(dto: { albumId: string; userId: string; role?: AlbumUserRole }) {
    const { albumId, userId, role = AlbumUserRole.Editor } = dto;
    const result = await this.get(AlbumUserRepository).create({ albumId, userId, role });
    return { albumUser: { albumId, userId, role }, result };
  }
  // Upserts a job-status row so the asset appears already processed.
  async newJobStatus(dto: Partial<Insertable<AssetJobStatusTable>> & { assetId: string }) {
    const jobStatus = mediumFactory.assetJobStatusInsert({ assetId: dto.assetId });
    const result = await this.get(AssetRepository).upsertJobStatus(jobStatus);
    return { jobStatus, result };
  }
  // Inserts a person owned by `dto.ownerId` with placeholder details.
  async newPerson(dto: Partial<Insertable<PersonTable>> & { ownerId: string }) {
    const person = mediumFactory.personInsert(dto);
    const result = await this.get(PersonRepository).create(person);
    return { person, result };
  }
  // Inserts a session for `dto.userId`.
  async newSession(dto: Partial<Insertable<SessionTable>> & { userId: string }) {
    const session = mediumFactory.sessionInsert(dto);
    const result = await this.get(SessionRepository).create(session);
    return { session, result };
  }
  // Creates a user + session and a matching AuthDto for sync-stream tests.
  async newSyncAuthUser() {
    const { user } = await this.newUser();
    const { session } = await this.newSession({ userId: user.id });
    const auth = factory.auth({
      session,
      user: {
        id: user.id,
        name: user.name,
        email: user.email,
      },
    });
    return {
      auth,
      session,
      user,
    };
  }
  // Links every tag in tagIds to every asset in assetIds (cross product).
  async newTagAsset(tagBulkAssets: { tagIds: string[]; assetIds: string[] }) {
    const tagsAssets: Insertable<TagAssetTable>[] = [];
    for (const tagId of tagBulkAssets.tagIds) {
      for (const assetId of tagBulkAssets.assetIds) {
        tagsAssets.push({ tagId, assetId });
      }
    }
    const result = await this.get(TagRepository).upsertAssetIds(tagsAssets);
    return { tagsAssets, result };
  }
}
/**
 * Medium-test context specialised for the sync service: real sync/session
 * repositories against the test database, with logging mocked out.
 */
export class SyncTestContext extends MediumTestContext<SyncService> {
  constructor(database: Kysely<DB>) {
    super(SyncService, {
      database,
      real: [SyncRepository, SyncCheckpointRepository, SessionRepository],
      mock: [LoggingRepository],
    });
  }
  // Runs one sync stream request and returns the parsed JSON-line responses.
  async syncStream(auth: AuthDto, types: SyncRequestType[], reset?: boolean) {
    const stream = mediumFactory.syncStream();
    // Wait for 2ms to ensure all updates are available and account for setTimeout inaccuracy
    await wait(2);
    await this.sut.stream(auth, stream, { types, reset });
    return stream.getResponse();
  }
  // Asserts a stream of `types` yields only the completion marker (no pending items).
  async assertSyncIsComplete(auth: AuthDto, types: SyncRequestType[]) {
    await expect(this.syncStream(auth, types)).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  }
  // Acknowledges a full response: keeps only the LAST ack seen per entity
  // type, plus every standalone SyncAckV1 ack.
  async syncAckAll(auth: AuthDto, response: Array<{ type: string; ack: string }>) {
    const acks: Record<string, string> = {};
    const syncAcks: string[] = [];
    for (const { type, ack } of response) {
      if (type === SyncEntityType.SyncAckV1) {
        syncAcks.push(ack);
        continue;
      }
      acks[type] = ack;
    }
    await this.sut.setAcks(auth, { acks: [...Object.values(acks), ...syncAcks] });
  }
}
// Fixed timestamp shared by every time field of the mocked fs stats below.
const mockDate = new Date('2024-06-01T12:00:00.000Z');
// Partial fs.Stats fixture with all time fields pinned to mockDate, so
// date fallbacks based on file times are deterministic in tests.
// NOTE(review): this is only a subset of fs.Stats — it is cast with
// `as Stats` at the point of use.
const mockStats = {
  mtime: mockDate,
  atime: mockDate,
  ctime: mockDate,
  birthtime: mockDate,
  atimeMs: 0,
  mtimeMs: 0,
  ctimeMs: 0,
  birthtimeMs: 0,
};
/**
 * Medium-test context for metadata (exif) extraction: real asset/metadata/tag
 * repositories, with config, events, geocoding, and storage mocked. File
 * stats are pinned to a fixed date so time-based fallbacks are deterministic.
 */
export class ExifTestContext extends MediumTestContext<MetadataService> {
  constructor(database: Kysely<DB>) {
    super(MetadataService, {
      database,
      real: [AssetRepository, AssetJobRepository, MetadataRepository, SystemMetadataRepository, TagRepository],
      mock: [ConfigRepository, EventRepository, LoggingRepository, MapRepository, StorageRepository],
    });
    this.getMock(ConfigRepository).getEnv.mockReturnValue(mockEnvData({}));
    this.getMock(EventRepository).emit.mockResolvedValue();
    // Reverse geocoding always resolves to an unknown location.
    this.getMock(MapRepository).reverseGeocode.mockResolvedValue({ country: null, state: null, city: null });
    this.getMock(StorageRepository).stat.mockResolvedValue(mockStats as Stats);
  }
  // Exposes the canned fs stats returned by the mocked StorageRepository.stat.
  getMockStats() {
    return mockStats;
  }
  // Reads the stored GPS coordinates for an asset.
  getGps(assetId: string) {
    return this.database
      .selectFrom('asset_exif')
      .select(['latitude', 'longitude'])
      .where('assetId', '=', assetId)
      .executeTakeFirstOrThrow();
  }
  // Lists all tag rows linked to an asset.
  getTags(assetId: string) {
    return this.database
      .selectFrom('tag')
      .innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
      .where('tag_asset.assetId', '=', assetId)
      .selectAll()
      .execute();
  }
  // Reads the asset's derived date columns alongside the exif originals.
  getDates(assetId: string) {
    return this.database
      .selectFrom('asset')
      .innerJoin('asset_exif', 'asset.id', 'asset_exif.assetId')
      .where('id', '=', assetId)
      .select(['asset.fileCreatedAt', 'asset.localDateTime', 'asset_exif.dateTimeOriginal', 'asset_exif.timeZone'])
      .executeTakeFirstOrThrow();
  }
}
// Instantiates a real (database-backed) repository for `key`. The switch
// groups classes by constructor signature; unknown keys throw.
const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
  switch (key) {
    // Repositories constructed with just the Kysely database handle.
    case AccessRepository:
    case AlbumRepository:
    case AlbumUserRepository:
    case ActivityRepository:
    case AssetRepository:
    case AssetEditRepository:
    case AssetJobRepository:
    case MemoryRepository:
    case NotificationRepository:
    case OcrRepository:
    case PartnerRepository:
    case PersonRepository:
    case PluginRepository:
    case SearchRepository:
    case SessionRepository:
    case SharedLinkRepository:
    case SharedLinkAssetRepository:
    case StackRepository:
    case SyncRepository:
    case SyncCheckpointRepository:
    case SystemMetadataRepository:
    case UserRepository:
    case VersionHistoryRepository:
    case WorkflowRepository: {
      return new key(db);
    }
    // Repositories with no constructor arguments.
    case ConfigRepository:
    case CryptoRepository: {
      return new key();
    }
    case DatabaseRepository: {
      return new key(db, LoggingRepository.create(), new ConfigRepository());
    }
    // Repositories that only need a logger.
    case EmailRepository: {
      return new key(LoggingRepository.create());
    }
    case MetadataRepository: {
      return new key(LoggingRepository.create());
    }
    case StorageRepository: {
      return new key(LoggingRepository.create());
    }
    case TagRepository: {
      return new key(db, LoggingRepository.create());
    }
    // Cast needed because LoggingRepository's constructor type differs from
    // the generic ClassConstructor<T> shape used for switch matching.
    case LoggingRepository as unknown as ClassConstructor<LoggingRepository>: {
      return new key() as unknown as T;
    }
    default: {
      throw new Error(`Unable to create repository instance for key: ${key?.name || key}`);
    }
  }
};
// Builds an automock for the repository class `key`. Classes whose
// constructors have required arguments get placeholder args so automock can
// instantiate them; unknown keys throw.
const newMockRepository = <T>(key: ClassConstructor<T>) => {
  switch (key) {
    // Classes automock can construct without special arguments.
    case ActivityRepository:
    case AlbumRepository:
    case AssetRepository:
    case AssetJobRepository:
    case ConfigRepository:
    case CryptoRepository:
    case MemoryRepository:
    case NotificationRepository:
    case OcrRepository:
    case PartnerRepository:
    case PersonRepository:
    case PluginRepository:
    case SessionRepository:
    case SyncRepository:
    case SyncCheckpointRepository:
    case SystemMetadataRepository:
    case UserRepository:
    case VersionHistoryRepository:
    case TagRepository:
    case WorkflowRepository: {
      return automock(key);
    }
    // The remaining cases supply minimal stand-in constructor arguments
    // (usually a logger-like object exposing setContext).
    case MapRepository: {
      return automock(MapRepository, { args: [undefined, undefined, { setContext: () => {} }] });
    }
    case TelemetryRepository: {
      return newTelemetryRepositoryMock();
    }
    case DatabaseRepository: {
      return automock(DatabaseRepository, {
        args: [undefined, { setContext: () => {} }, { getEnv: () => ({ database: { vectorExtension: '' } }) }],
      });
    }
    case EmailRepository: {
      return automock(EmailRepository, { args: [{ setContext: () => {} }] });
    }
    case EventRepository: {
      return automock(EventRepository, { args: [undefined, undefined, { setContext: () => {} }] });
    }
    case JobRepository: {
      return automock(JobRepository, {
        args: [
          undefined,
          undefined,
          undefined,
          {
            setContext: () => {},
          },
        ],
      });
    }
    case LoggingRepository as unknown as ClassConstructor<T>: {
      const configMock = { getEnv: () => ({ noColor: false }) };
      // strict: false because the logger has members automock cannot stub.
      return automock(LoggingRepository, { args: [undefined, configMock], strict: false });
    }
    case MachineLearningRepository: {
      return automock(MachineLearningRepository, { args: [{ setContext: () => {} }] });
    }
    case StorageRepository: {
      return automock(StorageRepository, { args: [{ setContext: () => {} }] });
    }
    default: {
      throw new Error(`Invalid repository key: ${key}`);
    }
  }
};
/**
 * Builds an insertable asset row: generates an id and timestamps, and fills
 * every required column with a placeholder; fields in `asset` win.
 */
const assetInsert = (asset: Partial<Insertable<AssetTable>> = {}) => {
  const timestamp = newDate();
  const base: Insertable<AssetTable> = {
    deviceAssetId: '',
    deviceId: '',
    originalFileName: '',
    checksum: randomBytes(32),
    type: AssetType.Image,
    originalPath: '/path/to/something.jpg',
    ownerId: 'not-a-valid-uuid',
    isFavorite: false,
    fileCreatedAt: timestamp,
    fileModifiedAt: timestamp,
    localDateTime: timestamp,
    visibility: AssetVisibility.Timeline,
    isEdited: false,
  };
  return { ...base, ...asset, id: asset.id || newUuid() };
};
/** Builds an insertable album row; only `ownerId` must be supplied. */
const albumInsert = (album: Partial<Insertable<AlbumTable>> & { ownerId: string }) => {
  const base: Omit<Insertable<AlbumTable>, 'ownerId'> = {
    albumName: 'Album',
  };
  return { ...base, ...album, id: album.id || newUuid() };
};
/** Builds a face-search row for `faceId`, generating an embedding unless one is given. */
const faceInsert = (face: Partial<Insertable<FaceSearchTable>> & { faceId: string }) => {
  const base = {
    faceId: face.faceId,
    embedding: face.embedding || newEmbedding(),
  };
  return Object.assign(base, face);
};
/**
 * Builds an asset-face row with a unit bounding box, a 10x10 source image,
 * and a machine-learning source type; any provided field wins over defaults.
 */
const assetFaceInsert = (assetFace: Partial<AssetFace> & { assetId: string }) => {
  const base = {
    assetId: assetFace.assetId ?? newUuid(),
    boundingBoxX1: assetFace.boundingBoxX1 ?? 0,
    boundingBoxX2: assetFace.boundingBoxX2 ?? 1,
    boundingBoxY1: assetFace.boundingBoxY1 ?? 0,
    boundingBoxY2: assetFace.boundingBoxY2 ?? 1,
    deletedAt: assetFace.deletedAt ?? null,
    id: assetFace.id ?? newUuid(),
    imageHeight: assetFace.imageHeight ?? 10,
    imageWidth: assetFace.imageWidth ?? 10,
    personId: assetFace.personId ?? null,
    sourceType: assetFace.sourceType ?? SourceType.MachineLearning,
    isVisible: assetFace.isVisible ?? true,
  };
  return Object.assign(base, assetFace);
};
/**
 * Builds a job-status row whose processing timestamps are all backdated by
 * 15 days, so the asset looks "already processed" unless overridden.
 */
const assetJobStatusInsert = (
  job: Partial<Insertable<AssetJobStatusTable>> & { assetId: string },
): Insertable<AssetJobStatusTable> => {
  const processedAt = DateTime.now().minus({ days: 15 }).toISO();
  const base: Omit<Insertable<AssetJobStatusTable>, 'assetId'> = {
    duplicatesDetectedAt: processedAt,
    facesRecognizedAt: processedAt,
    metadataExtractedAt: processedAt,
  };
  return { ...base, ...job };
};
/** Builds an insertable person row owned by `person.ownerId`, with placeholder details. */
const personInsert = (person: Partial<Insertable<PersonTable>> & { ownerId: string }) => {
  // `||` (not `??`) is deliberate here: empty strings/false fall back too.
  const base = {
    birthDate: person.birthDate || null,
    color: person.color || null,
    createdAt: person.createdAt || newDate(),
    faceAssetId: person.faceAssetId || null,
    id: person.id || newUuid(),
    isFavorite: person.isFavorite || false,
    isHidden: person.isHidden || false,
    name: person.name || 'Test Name',
    ownerId: person.ownerId || newUuid(),
    thumbnailPath: person.thumbnailPath || '/path/to/thumbnail.jpg',
  };
  return Object.assign(base, person);
};
/** Hashes `value` with SHA-256 and returns the digest encoded as base64. */
const sha256 = (value: string): string => {
  const hasher = createHash('sha256');
  hasher.update(value);
  return hasher.digest('base64');
};
/**
 * Builds an insertable session row for `userId`; the token defaults to the
 * sha256 of the session id so it is stable per session.
 */
const sessionInsert = ({
  id = newUuid(),
  userId,
  ...session
}: Partial<Insertable<SessionTable>> & { userId: string }) => {
  const base: Insertable<SessionTable> = {
    id,
    userId,
    isPendingSyncReset: false,
    token: sha256(id),
  };
  return Object.assign(base, session, { id });
};
/** Builds an insertable user row; email and name are derived from the generated id. */
const userInsert = (user: Partial<Insertable<UserTable>> = {}) => {
  const id = user.id || newUuid();
  const base = {
    email: `${id}@immich.cloud`,
    name: `User ${id}`,
    deletedAt: null,
    isAdmin: false,
    profileImagePath: '',
    profileChangedAt: newDate(),
    shouldChangePassword: true,
    storageLabel: null,
    pinCode: null,
    oauthId: '',
    avatarColor: null,
    quotaSizeInBytes: null,
    quotaUsageInBytes: 0,
  };
  return Object.assign(base, user, { id });
};
/** Builds an insertable on-this-day memory row with all scheduling fields cleared. */
const memoryInsert = (memory: Partial<Insertable<MemoryTable>> = {}) => {
  const id = memory.id || newUuid();
  const timestamp = newDate();
  const base: Insertable<MemoryTable> = {
    id,
    createdAt: timestamp,
    updatedAt: timestamp,
    deletedAt: null,
    type: MemoryType.OnThisDay,
    data: { year: 2025 },
    showAt: null,
    hideAt: null,
    seenAt: null,
    isSaved: false,
    memoryAt: timestamp,
    ownerId: memory.ownerId || newUuid(),
  };
  return Object.assign(base, memory, { id });
};
/** Builds an insertable tag row; callers normally override userId and value. */
const tagInsert = (tag: Partial<Insertable<TagTable>>) => {
  const id = tag.id || newUuid();
  const base: Insertable<TagTable> = {
    id,
    userId: '',
    value: '',
    createdAt: newDate(),
    updatedAt: newDate(),
    color: '',
    parentId: null,
    updateId: newUuid(),
  };
  return Object.assign(base, tag, { id });
};
/**
 * Writable sink that accumulates everything written to it and parses the
 * result as newline-delimited JSON on demand.
 */
class CustomWritable extends Writable {
  private buffer = '';

  _write(chunk: any, _encoding: string, callback: () => void) {
    this.buffer += String(chunk);
    callback();
  }

  /** Parses the accumulated text as one JSON document per non-empty line. */
  getResponse() {
    const parsed = [];
    for (const line of this.buffer.split('\n')) {
      if (line.length > 0) {
        parsed.push(JSON.parse(line));
      }
    }
    return parsed;
  }
}
/** Creates a writable sink that records newline-delimited JSON sync responses. */
const syncStream = () => new CustomWritable();
/** Fixture for the client metadata captured when a session is created. */
const loginDetails = () => ({
  isSecure: false,
  clientIp: '',
  deviceType: '',
  deviceOS: '',
  appVersion: null,
});
/** Builds a LoginResponseDto backed by a freshly generated user fixture. */
const loginResponse = (): LoginResponseDto => {
  const { id, email, name, profileImagePath, isAdmin, shouldChangePassword } = userInsert({});
  return {
    accessToken: 'access-token',
    userId: id,
    userEmail: email,
    name,
    profileImagePath,
    isAdmin,
    shouldChangePassword,
    isOnboarded: false,
  };
};
/** Builds an UploadFile fixture with a random uuid and checksum; `file` overrides defaults. */
const uploadFile = (file: Partial<UploadFile> = {}) => {
  const base = {
    uuid: newUuid(),
    checksum: randomBytes(32),
    originalPath: '/path/to/file.jpg',
    originalName: 'file.jpg',
    size: 123_456,
  };
  return Object.assign(base, file);
};
// Aggregated export of every fixture builder defined in this file.
export const mediumFactory = {
  assetInsert,
  assetFaceInsert,
  assetJobStatusInsert,
  albumInsert,
  faceInsert,
  personInsert,
  sessionInsert,
  syncStream,
  userInsert,
  memoryInsert,
  loginDetails,
  loginResponse,
  tagInsert,
  uploadFile,
};

View file

@ -0,0 +1,52 @@
import { Kysely } from 'kysely';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { DB } from 'src/schema';
import { getKyselyConfig } from 'src/utils/database';
import { GenericContainer, Wait } from 'testcontainers';
const globalSetup = async () => {
const templateName = 'mich';
const postgresContainer = await new GenericContainer('ghcr.io/immich-app/postgres:14-vectorchord0.4.3')
.withExposedPorts(5432)
.withEnvironment({
POSTGRES_PASSWORD: 'postgres',
POSTGRES_USER: 'postgres',
POSTGRES_DB: templateName,
})
.withCommand([
'postgres',
'-c',
'shared_preload_libraries=vchord.so',
'-c',
'max_wal_size=2GB',
'-c',
'shared_buffers=512MB',
'-c',
'fsync=off',
'-c',
'full_page_writes=off',
'-c',
'synchronous_commit=off',
'-c',
'config_file=/var/lib/postgresql/data/postgresql.conf',
])
.withWaitStrategy(Wait.forAll([Wait.forLogMessage('database system is ready to accept connections', 2)]))
.start();
const postgresPort = postgresContainer.getMappedPort(5432);
const postgresUrl = `postgres://postgres:postgres@localhost:${postgresPort}/${templateName}`;
process.env.IMMICH_TEST_POSTGRES_URL = postgresUrl;
const db = new Kysely<DB>(getKyselyConfig({ connectionType: 'url', url: postgresUrl }));
const configRepository = new ConfigRepository();
const logger = LoggingRepository.create();
await new DatabaseRepository(db, logger, configRepository).runMigrations();
await db.destroy();
};
export default globalSetup;

View file

@ -0,0 +1,114 @@
import { expect } from 'vitest';
// Expected HTTP error payloads used by response assertions in e2e/medium tests.
export const errorDto = {
  unauthorized: {
    error: 'Unauthorized',
    statusCode: 401,
    message: 'Authentication required',
    correlationId: expect.any(String),
  },
  forbidden: {
    error: 'Forbidden',
    statusCode: 403,
    message: expect.any(String),
    correlationId: expect.any(String),
  },
  // 403 with a specific missing-permission message.
  missingPermission: (permission: string) => ({
    error: 'Forbidden',
    statusCode: 403,
    message: `Missing required permission: ${permission}`,
    correlationId: expect.any(String),
  }),
  wrongPassword: {
    error: 'Bad Request',
    statusCode: 400,
    message: 'Wrong password',
    correlationId: expect.any(String),
  },
  invalidToken: {
    error: 'Unauthorized',
    statusCode: 401,
    message: 'Invalid user token',
    correlationId: expect.any(String),
  },
  invalidShareKey: {
    error: 'Unauthorized',
    statusCode: 401,
    message: 'Invalid share key',
    correlationId: expect.any(String),
  },
  invalidSharePassword: {
    error: 'Unauthorized',
    statusCode: 401,
    message: 'Invalid password',
    correlationId: expect.any(String),
  },
  // Generic 400; pass a message to pin it, otherwise any payload matches.
  // NOTE(review): unlike the other fixtures this omits correlationId — confirm intentional.
  badRequest: (message: any = null) => ({
    error: 'Bad Request',
    statusCode: 400,
    message: message ?? expect.anything(),
  }),
  noPermission: {
    error: 'Bad Request',
    statusCode: 400,
    message: expect.stringContaining('Not found or no'),
    correlationId: expect.any(String),
  },
  incorrectLogin: {
    error: 'Unauthorized',
    statusCode: 401,
    message: 'Incorrect email or password',
    correlationId: expect.any(String),
  },
  alreadyHasAdmin: {
    error: 'Bad Request',
    statusCode: 400,
    message: 'The server already has an admin',
    correlationId: expect.any(String),
  },
};
// Expected response body for the initial admin signup.
export const signupResponseDto = {
  admin: {
    avatarColor: expect.any(String),
    id: expect.any(String),
    name: 'Immich Admin',
    email: 'admin@immich.cloud',
    storageLabel: 'admin',
    profileImagePath: '',
    // NOTE(review): signup responds with shouldChangePassword=true even for the
    // freshly created admin — confirm this is the intended contract.
    shouldChangePassword: true,
    isAdmin: true,
    createdAt: expect.any(String),
    updatedAt: expect.any(String),
    deletedAt: null,
    oauthId: '',
    quotaUsageInBytes: 0,
    quotaSizeInBytes: null,
    status: 'active',
    license: null,
    profileChangedAt: expect.any(String),
  },
};
// Expected response body for a successful admin login.
export const loginResponseDto = {
  admin: {
    accessToken: expect.any(String),
    name: 'Immich Admin',
    isAdmin: true,
    profileImagePath: '',
    shouldChangePassword: true,
    userEmail: 'admin@immich.cloud',
    userId: expect.any(String),
  },
};
// Expected shape of the session/device record for the current device.
export const deviceDto = {
  current: {
    id: expect.any(String),
    createdAt: expect.any(String),
    updatedAt: expect.any(String),
    current: true,
    deviceOS: '',
    deviceType: '',
  },
};

View file

@ -0,0 +1,65 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Shared database handle, initialised once per file in beforeAll.
let database: Kysely<DB>;

// Creates a metadata-extraction test context plus one asset pointing at the
// given e2e test-asset file.
const setup = async (testAssetPath: string) => {
  const ctx = new ExifTestContext(database);
  const { user } = await ctx.newUser();
  const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
  const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
  return { ctx, sut: ctx.sut, asset };
};

beforeAll(async () => {
  database = await getKyselyDB();
});

describe('exif date time', () => {
  it('should prioritize DateTimeOriginal', async () => {
    const { ctx, sut, asset } = await setup('metadata/dates/date-priority-test.jpg');
    await sut.handleMetadataExtraction({ id: asset.id });
    // Expected values mirror the metadata embedded in the fixture image.
    await expect(ctx.getDates(asset.id)).resolves.toEqual({
      timeZone: null,
      dateTimeOriginal: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
      localDateTime: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
      fileCreatedAt: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
    });
  });
  it('should extract GPSDateTime with GPS coordinates ', async () => {
    const { ctx, sut, asset } = await setup('metadata/dates/gps-datetime.jpg');
    await sut.handleMetadataExtraction({ id: asset.id });
    // localDateTime is the UTC instant shifted into the GPS-derived zone.
    await expect(ctx.getDates(asset.id)).resolves.toEqual({
      timeZone: 'America/Los_Angeles',
      dateTimeOriginal: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
      localDateTime: DateTime.fromISO('2023-11-15T04:30:00.000Z').toJSDate(),
      fileCreatedAt: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
    });
  });
  it('should ignore the TimeCreated tag', async () => {
    const { ctx, sut, asset } = await setup('metadata/dates/time-created.jpg');
    await sut.handleMetadataExtraction({ id: asset.id });
    // With no usable exif date, extraction falls back to the (mocked) file mtime.
    const stats = ctx.getMockStats();
    await expect(ctx.getDates(asset.id)).resolves.toEqual({
      timeZone: null,
      dateTimeOriginal: stats.mtime,
      localDateTime: stats.mtime,
      fileCreatedAt: stats.mtime,
    });
  });
});

View file

@ -0,0 +1,31 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Shared database handle, initialised once per file in beforeAll.
let database: Kysely<DB>;

// Creates a metadata-extraction test context plus one asset pointing at the
// given e2e test-asset file.
const setup = async (testAssetPath: string) => {
  const ctx = new ExifTestContext(database);
  const { user } = await ctx.newUser();
  const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
  const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
  return { ctx, sut: ctx.sut, asset };
};

beforeAll(async () => {
  database = await getKyselyDB();
});

describe('exif gps', () => {
  it('should handle empty strings', async () => {
    const { ctx, sut, asset } = await setup('metadata/gps-position/empty_gps.jpg');
    await sut.handleMetadataExtraction({ id: asset.id });
    // Empty GPS strings must map to null coordinates, not 0/0 or a crash.
    await expect(ctx.getGps(asset.id)).resolves.toEqual({ latitude: null, longitude: null });
  });
});

View file

@ -0,0 +1,34 @@
import { Kysely } from 'kysely';
import { resolve } from 'node:path';
import { DB } from 'src/schema';
import { ExifTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Shared database handle, initialised once per file in beforeAll.
let database: Kysely<DB>;

// Creates a metadata-extraction test context plus one asset pointing at the
// given e2e test-asset file.
const setup = async (testAssetPath: string) => {
  const ctx = new ExifTestContext(database);
  const { user } = await ctx.newUser();
  const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
  const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
  return { ctx, sut: ctx.sut, asset };
};

beforeAll(async () => {
  database = await getKyselyDB();
});

describe('exif tags', () => {
  // Title fixed: was the garbled "should detect and regular tags".
  it('should detect regular tags', async () => {
    const { ctx, sut, asset } = await setup('metadata/tags/picasa.jpg');
    await sut.handleMetadataExtraction({ id: asset.id });
    // Tag values come from the keywords embedded in the picasa.jpg fixture.
    await expect(ctx.getTags(asset.id)).resolves.toEqual([
      expect.objectContaining({ assetId: asset.id, value: 'Frost', parentId: null }),
      expect.objectContaining({ assetId: asset.id, value: 'Yard', parentId: null }),
    ]);
  });
});

View file

@ -0,0 +1,115 @@
import { Kysely } from 'kysely';
import { AssetEditAction, MirrorAxis } from 'src/dtos/editing.dto';
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { DB } from 'src/schema';
import { BaseService } from 'src/services/base.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Shared database handle, initialised once per file in beforeAll.
let defaultDatabase: Kysely<DB>;

// Builds a bare service context backed by the test database and returns the
// real AssetEditRepository under test.
const setup = (db?: Kysely<DB>) => {
  const { ctx } = newMediumService(BaseService, {
    database: db || defaultDatabase,
    real: [],
    mock: [LoggingRepository],
  });
  return { ctx, sut: ctx.get(AssetEditRepository) };
};

// Reads the asset.isEdited flag straight from the database. Extracted to
// remove the eight copies of this query the tests previously repeated.
const selectIsEdited = (db: Kysely<DB>, assetId: string) =>
  db.selectFrom('asset').select('isEdited').where('id', '=', assetId).executeTakeFirstOrThrow();

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});

describe(AssetEditRepository.name, () => {
  describe('replaceAll', () => {
    it('should set isEdited on insert', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: false });
      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
      ]);
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: true });
    });

    it('should set isEdited when inserting multiple edits', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: false });
      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: true });
    });

    it('should keep isEdited when removing some edits', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: false });
      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: true });
      // Shrinking to a non-empty edit list must leave the flag set.
      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
      ]);
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: true });
    });

    it('should set isEdited to false if all edits are deleted', async () => {
      const { ctx, sut } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: false });
      await sut.replaceAll(asset.id, [
        { action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
        { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
        { action: AssetEditAction.Rotate, parameters: { angle: 90 } },
      ]);
      // Replacing with an empty list clears the flag again.
      await sut.replaceAll(asset.id, []);
      await expect(selectIsEdited(ctx.database, asset.id)).resolves.toEqual({ isEdited: false });
    });
  });
});

View file

@ -0,0 +1,150 @@
import { Kysely } from 'kysely';
import { AssetRepository } from 'src/repositories/asset.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { DB } from 'src/schema';
import { BaseService } from 'src/services/base.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Builds a medium-test context around BaseService and exposes the container's
// real AssetRepository as the system under test.
const setup = (db?: Kysely<DB>) => {
const database = db ?? defaultDatabase;
const { ctx } = newMediumService(BaseService, { database, real: [], mock: [LoggingRepository] });
const sut = ctx.get(AssetRepository);
return { ctx, sut };
};
// Acquire one shared database handle for every test in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
// Medium tests for AssetRepository EXIF lock handling, run against a real database.
describe(AssetRepository.name, () => {
describe('upsertExif', () => {
it('should append to locked columns', async () => {
const { ctx, sut } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Seed EXIF with one already-locked property.
await ctx.newExif({
assetId: asset.id,
dateTimeOriginal: '2023-11-19T18:11:00',
lockedProperties: ['dateTimeOriginal'],
});
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal'] });
// 'append' behavior merges the new key with the existing locked set.
await sut.upsertExif(
{ assetId: asset.id, lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
);
// NOTE(review): the merged array lists the newly appended key first — presumably
// the repository's merge puts new keys ahead of existing ones; confirm the
// ordering is actually guaranteed rather than incidental.
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['description', 'dateTimeOriginal'] });
});
it('should deduplicate locked columns', async () => {
const { ctx, sut } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Seed EXIF with 'description' already locked.
await ctx.newExif({
assetId: asset.id,
dateTimeOriginal: '2023-11-19T18:11:00',
lockedProperties: ['dateTimeOriginal', 'description'],
});
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
// Appending a key that is already locked must not create a duplicate entry.
await sut.upsertExif(
{ assetId: asset.id, lockedProperties: ['description'] },
{ lockedPropertiesBehavior: 'append' },
);
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['description', 'dateTimeOriginal'] });
});
});
describe('unlockProperties', () => {
it('should unlock one property', async () => {
const { ctx, sut } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({
assetId: asset.id,
dateTimeOriginal: '2023-11-19T18:11:00',
lockedProperties: ['dateTimeOriginal', 'description'],
});
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
// Removing one key leaves the remaining locked keys intact.
await sut.unlockProperties(asset.id, ['dateTimeOriginal']);
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['description'] });
});
it('should unlock all properties', async () => {
const { ctx, sut } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({
assetId: asset.id,
dateTimeOriginal: '2023-11-19T18:11:00',
lockedProperties: ['dateTimeOriginal', 'description'],
});
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
// Unlocking every key resets the column to NULL rather than an empty array.
await sut.unlockProperties(asset.id, ['description', 'dateTimeOriginal']);
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: null });
});
});
});

View file

@ -0,0 +1,261 @@
import { Kysely } from 'kysely';
import { AssetMediaStatus } from 'src/dtos/asset-media-response.dto';
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
import { AssetFileType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { AssetMediaService } from 'src/services/asset-media.service';
import { AssetService } from 'src/services/asset.service';
import { ImmichFileResponse } from 'src/utils/file';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Creates an AssetMediaService wired to real access/asset/user repositories,
// with event, logging, job, and storage repositories mocked out.
const setup = (db?: Kysely<DB>) => {
const database = db ?? defaultDatabase;
const real = [AccessRepository, AssetRepository, UserRepository];
const mock = [EventRepository, LoggingRepository, JobRepository, StorageRepository];
return newMediumService(AssetMediaService, { database, real, mock });
};
// Acquire one shared database handle for every test in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(AssetService.name, () => {
describe('uploadAsset', () => {
it('should work', async () => {
const { sut, ctx } = setup();
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const auth = factory.auth({ user: { id: user.id } });
const file = mediumFactory.uploadFile();
await expect(
sut.uploadAsset(
auth,
{
deviceId: 'some-id',
deviceAssetId: 'some-id',
fileModifiedAt: new Date(),
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
},
file,
),
).resolves.toEqual({
id: expect.any(String),
status: AssetMediaStatus.CREATED,
});
expect(ctx.getMock(EventRepository).emit).toHaveBeenCalledWith('AssetCreate', {
asset: expect.objectContaining({ deviceAssetId: 'some-id' }),
});
});
it('should work with an empty metadata list', async () => {
const { sut, ctx } = setup();
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const auth = factory.auth({ user: { id: user.id } });
const file = mediumFactory.uploadFile();
await expect(
sut.uploadAsset(
auth,
{
deviceId: 'some-id',
deviceAssetId: 'some-id',
fileModifiedAt: new Date(),
fileCreatedAt: new Date(),
assetData: Buffer.from('some data'),
metadata: [],
},
file,
),
).resolves.toEqual({
id: expect.any(String),
status: AssetMediaStatus.CREATED,
});
});
});
describe('viewThumbnail', () => {
it('should return original thumbnail by default when both exist', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Create both original and edited thumbnails
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/original/preview.jpg',
isEdited: false,
});
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/edited/preview.jpg',
isEdited: true,
});
const auth = factory.auth({ user: { id: user.id } });
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW });
expect(result).toBeInstanceOf(ImmichFileResponse);
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
});
it('should return edited thumbnail when edited=true', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Create both original and edited thumbnails
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/original/preview.jpg',
isEdited: false,
});
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/edited/preview.jpg',
isEdited: true,
});
const auth = factory.auth({ user: { id: user.id } });
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: true });
expect(result).toBeInstanceOf(ImmichFileResponse);
expect((result as ImmichFileResponse).path).toBe('/edited/preview.jpg');
});
it('should return original thumbnail when edited=false', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Create both original and edited thumbnails
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/original/preview.jpg',
isEdited: false,
});
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/edited/preview.jpg',
isEdited: true,
});
const auth = factory.auth({ user: { id: user.id } });
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: false });
expect(result).toBeInstanceOf(ImmichFileResponse);
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
});
it('should return original thumbnail when only original exists and edited=false', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Create only original thumbnail
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/original/preview.jpg',
isEdited: false,
});
const auth = factory.auth({ user: { id: user.id } });
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: false });
expect(result).toBeInstanceOf(ImmichFileResponse);
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
});
it('should return original thumbnail when only original exists and edited=true', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Create only original thumbnail
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Preview,
path: '/original/preview.jpg',
isEdited: false,
});
const auth = factory.auth({ user: { id: user.id } });
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: true });
expect(result).toBeInstanceOf(ImmichFileResponse);
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
});
it('should work with thumbnail size', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
// Create both original and edited thumbnails
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Thumbnail,
path: '/original/thumbnail.jpg',
isEdited: false,
});
await ctx.newAssetFile({
assetId: asset.id,
type: AssetFileType.Thumbnail,
path: '/edited/thumbnail.jpg',
isEdited: true,
});
const auth = factory.auth({ user: { id: user.id } });
// Test default (should get original)
const resultDefault = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.THUMBNAIL });
expect(resultDefault).toBeInstanceOf(ImmichFileResponse);
expect((resultDefault as ImmichFileResponse).path).toBe('/original/thumbnail.jpg');
// Test edited=true (should get edited)
const resultEdited = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.THUMBNAIL, edited: true });
expect(resultEdited).toBeInstanceOf(ImmichFileResponse);
expect((resultEdited as ImmichFileResponse).path).toBe('/edited/thumbnail.jpg');
});
});
});

View file

@ -0,0 +1,606 @@
import { Kysely } from 'kysely';
import { AssetFileType, AssetMetadataKey, JobName, SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { AssetService } from 'src/services/asset.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Creates an AssetService backed by the full set of real repositories it needs
// (assets, asset jobs, albums, access, shared-link assets, stacks, users), with
// event, logging, job, and storage repositories mocked out.
const setup = (db?: Kysely<DB>) => {
const database = db ?? defaultDatabase;
const real = [
AssetRepository,
AssetJobRepository,
AlbumRepository,
AccessRepository,
SharedLinkAssetRepository,
StackRepository,
UserRepository,
];
const mock = [EventRepository, LoggingRepository, JobRepository, StorageRepository];
return newMediumService(AssetService, { database, real, mock });
};
// Acquire one shared database handle for every test in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(AssetService.name, () => {
describe('getStatistics', () => {
// Regression guard: SQL aggregate counts can come back as strings from the
// driver; the service must coerce them to numbers.
it('should return stats as numbers, not strings', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
const auth = factory.auth({ user: { id: user.id } });
await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
});
});
// Tests for AssetService.copy: transferring album membership, shared links,
// stacks, favorite status, and sidecar files from a source asset to a target.
describe('copy', () => {
it('should copy albums', async () => {
const { sut, ctx } = setup();
const albumRepo = ctx.get(AlbumRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
const { album } = await ctx.newAlbum({ ownerId: user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: oldAsset.id });
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
// The target asset is added to the album; the source stays a member too.
await expect(albumRepo.getAssetIds(album.id, [oldAsset.id, newAsset.id])).resolves.toEqual(
new Set([oldAsset.id, newAsset.id]),
);
});
it('should copy shared links', async () => {
const { sut, ctx } = setup();
const sharedLinkRepo = ctx.get(SharedLinkRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const { id: sharedLinkId } = await sharedLinkRepo.create({
allowUpload: false,
key: Buffer.from('123'),
type: SharedLinkType.Individual,
userId: user.id,
assetIds: [oldAsset.id],
});
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
// After copy the shared link contains both the source and the target asset.
await expect(sharedLinkRepo.get(user.id, sharedLinkId)).resolves.toEqual(
expect.objectContaining({
assets: [expect.objectContaining({ id: oldAsset.id }), expect.objectContaining({ id: newAsset.id })],
}),
);
});
it('should merge stacks', async () => {
const { sut, ctx } = setup();
const stackRepo = ctx.get(StackRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
await ctx.newExif({ assetId: asset2.id, description: 'foo' });
// Source belongs to one stack, target to another; copy must merge them.
await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id])
const {
stack: { id: newStackId },
} = await ctx.newStack({ ownerId: user.id }, [newAsset.id, asset2.id]);
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
// The source's stack is dissolved into the target's stack.
await expect(stackRepo.getById(oldAsset.id)).resolves.toEqual(undefined);
const newStack = await stackRepo.getById(newStackId);
expect(newStack).toEqual(
expect.objectContaining({
primaryAssetId: newAsset.id,
assets: expect.arrayContaining([expect.objectContaining({ id: asset2.id })]),
}),
);
expect(newStack!.assets.length).toEqual(4);
});
it('should copy stack', async () => {
const { sut, ctx } = setup();
const stackRepo = ctx.get(StackRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const {
stack: { id: stackId },
} = await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
// Target (not in any stack before) joins the source's existing stack.
const stack = await stackRepo.getById(stackId);
expect(stack).toEqual(
expect.objectContaining({
primaryAssetId: oldAsset.id,
assets: expect.arrayContaining([expect.objectContaining({ id: newAsset.id })]),
}),
);
expect(stack!.assets.length).toEqual(3);
});
it('should copy favorite status', async () => {
const { sut, ctx } = setup();
const assetRepo = ctx.get(AssetRepository);
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, isFavorite: true });
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
await expect(assetRepo.getById(newAsset.id)).resolves.toEqual(expect.objectContaining({ isFavorite: true }));
});
it('should copy sidecar file', async () => {
const { sut, ctx } = setup();
const storageRepo = ctx.getMock(StorageRepository);
const jobRepo = ctx.getMock(JobRepository);
storageRepo.copyFile.mockResolvedValue();
jobRepo.queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newAssetFile({
assetId: oldAsset.id,
path: '/path/to/my/sidecar.xmp',
type: AssetFileType.Sidecar,
});
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
const auth = factory.auth({ user: { id: user.id } });
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
// The sidecar is copied next to the target's original file, and metadata
// extraction is re-queued for the target.
expect(storageRepo.copyFile).toHaveBeenCalledWith('/path/to/my/sidecar.xmp', `${newAsset.originalPath}.xmp`);
expect(jobRepo.queue).toHaveBeenCalledWith({
name: JobName.AssetExtractMetadata,
data: { id: newAsset.id },
});
});
});
describe('delete', () => {
it('should delete asset', async () => {
const { sut, ctx } = setup();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
const thumbnailPath = '/path/to/thumbnail.jpg';
const previewPath = '/path/to/preview.jpg';
const sidecarPath = '/path/to/sidecar.xmp';
await Promise.all([
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Thumbnail, path: thumbnailPath }),
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: previewPath }),
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }),
]);
await sut.handleAssetDeletion({ id: asset.id, deleteOnDisk: true });
// Generated files, the sidecar, and the original are all queued for deletion.
expect(ctx.getMock(JobRepository).queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: [thumbnailPath, previewPath, sidecarPath, asset.originalPath] },
});
});
it('should not delete offline assets', async () => {
const { sut, ctx } = setup();
ctx.getMock(EventRepository).emit.mockResolvedValue();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id, isOffline: true });
const thumbnailPath = '/path/to/thumbnail.jpg';
const previewPath = '/path/to/preview.jpg';
await Promise.all([
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Thumbnail, path: thumbnailPath }),
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: previewPath }),
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: `/path/to/sidecar.xmp` }),
]);
await sut.handleAssetDeletion({ id: asset.id, deleteOnDisk: true });
// For an offline asset only the generated thumbnails are deleted — presumably
// the original and sidecar live in an external library that must stay untouched;
// NOTE(review): confirm the sidecar exclusion is intentional.
expect(ctx.getMock(JobRepository).queue).toHaveBeenCalledWith({
name: JobName.FileDelete,
data: { files: [thumbnailPath, previewPath] },
});
});
});
describe('update', () => {
it('should automatically lock lockable columns', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: null });
await sut.update(auth, asset.id, {
latitude: 42,
longitude: 42,
rating: 3,
description: 'foo',
dateTimeOriginal: '2023-11-19T18:11:00+01:00',
});
// Every user-edited EXIF field ends up in lockedProperties (timeZone is
// locked implicitly because it is derived from the dateTimeOriginal offset).
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({
lockedProperties: ['timeZone', 'rating', 'description', 'latitude', 'longitude', 'dateTimeOriginal'],
});
});
it('should update dateTimeOriginal', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, description: 'test' });
// No offset in the input: stored as UTC with no time zone recorded.
await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00' });
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
expect.objectContaining({
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-19T18:11:00+00:00', timeZone: null }),
}),
);
});
it('should update dateTimeOriginal with time zone', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queue.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, description: 'test' });
// Explicit -07:00 offset: timestamp is normalized to UTC, offset kept as UTC-7.
await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
expect.objectContaining({
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00', timeZone: 'UTC-7' }),
}),
);
});
});
// Bulk variant of update(); mirrors the single-asset expectations above.
describe('updateAll', () => {
it('should automatically lock lockable columns', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({ lockedProperties: null });
await sut.updateAll(auth, {
ids: [asset.id],
latitude: 42,
description: 'foo',
longitude: 42,
rating: 3,
dateTimeOriginal: '2023-11-19T18:11:00+01:00',
});
await expect(
ctx.database
.selectFrom('asset_exif')
.select('lockedProperties')
.where('assetId', '=', asset.id)
.executeTakeFirstOrThrow(),
).resolves.toEqual({
lockedProperties: ['timeZone', 'rating', 'description', 'latitude', 'longitude', 'dateTimeOriginal'],
});
});
it('should relatively update assets', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
// dateTimeRelative shifts the existing timestamp; here -11 moves 18:11 to 18:00
// (NOTE(review): the unit appears to be minutes based on this expectation — confirm).
await sut.updateAll(auth, { ids: [asset.id], dateTimeRelative: -11 });
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
expect.objectContaining({
exifInfo: expect.objectContaining({
dateTimeOriginal: '2023-11-19T18:00:00+00:00',
}),
}),
);
});
it('should update dateTimeOriginal', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, description: 'test' });
await sut.updateAll(auth, { ids: [asset.id], dateTimeOriginal: '2023-11-19T18:11:00' });
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
expect.objectContaining({
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-19T18:11:00+00:00', timeZone: null }),
}),
);
});
it('should update dateTimeOriginal with time zone', async () => {
const { sut, ctx } = setup();
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newExif({ assetId: asset.id, description: 'test' });
await sut.updateAll(auth, { ids: [asset.id], dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
expect.objectContaining({
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00', timeZone: 'UTC-7' }),
}),
);
});
});
// Bulk key/value metadata upsert: insert, conflict overwrite, multiple assets,
// and multiple keys per asset.
describe('upsertBulkMetadata', () => {
it('should work', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
const items = [{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } }];
await sut.upsertBulkMetadata(auth, { items });
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
expect(metadata.length).toEqual(1);
expect(metadata[0]).toEqual(
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } }),
);
});
it('should work on conflict', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'old-id' } });
// verify existing metadata
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'old-id' } }),
]);
const items = [{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'new-id' } }];
await sut.upsertBulkMetadata(auth, { items });
// verify updated metadata
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'new-id' } }),
]);
});
it('should work with multiple assets', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
const items = [
{ assetId: asset1.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
{ assetId: asset2.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } },
];
await sut.upsertBulkMetadata(auth, { items });
const metadata1 = await ctx.get(AssetRepository).getMetadata(asset1.id);
expect(metadata1).toEqual([
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } }),
]);
const metadata2 = await ctx.get(AssetRepository).getMetadata(asset2.id);
expect(metadata2).toEqual([
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } }),
]);
});
it('should work with multiple metadata for the same asset', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const { asset } = await ctx.newAsset({ ownerId: user.id });
const items = [
{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
{ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } },
];
await sut.upsertBulkMetadata(auth, { items });
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
expect(metadata).toEqual(
expect.arrayContaining([
expect.objectContaining({
key: AssetMetadataKey.MobileApp,
value: { iCloudId: 'id1' },
}),
expect.objectContaining({
key: 'some-other-key',
value: { foo: 'bar' },
}),
]),
);
});
});
// deleteBulkMetadata: removes metadata rows identified by (assetId, key)
// pairs in a single call, scoped to the authenticated user's assets.
describe('deleteBulkMetadata', () => {
  it('should work', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    // seed one metadata row, delete it via the service, then verify it is gone
    await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } });
    await sut.deleteBulkMetadata(auth, { items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }] });
    const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
    expect(metadata.length).toEqual(0);
  });
  it('should work even if the item does not exist', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    // deleting a key that was never written must be a silent no-op
    await sut.deleteBulkMetadata(auth, { items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }] });
    const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
    expect(metadata.length).toEqual(0);
  });
  it('should work with multiple assets', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newMetadata({ assetId: asset1.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
    const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newMetadata({ assetId: asset2.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } });
    // a single call may target rows across different assets
    await sut.deleteBulkMetadata(auth, {
      items: [
        { assetId: asset1.id, key: AssetMetadataKey.MobileApp },
        { assetId: asset2.id, key: AssetMetadataKey.MobileApp },
      ],
    });
    await expect(ctx.get(AssetRepository).getMetadata(asset1.id)).resolves.toEqual([]);
    await expect(ctx.get(AssetRepository).getMetadata(asset2.id)).resolves.toEqual([]);
  });
  it('should work with multiple metadata for the same asset', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
    await ctx.newMetadata({ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } });
    // both keys on the same asset are removed in one call
    await sut.deleteBulkMetadata(auth, {
      items: [
        { assetId: asset.id, key: AssetMetadataKey.MobileApp },
        { assetId: asset.id, key: 'some-other-key' },
      ],
    });
    await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([]);
  });
  it('should not delete unspecified keys', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
    await ctx.newMetadata({ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } });
    // only the MobileApp row is targeted; the other key must survive
    await sut.deleteBulkMetadata(auth, {
      items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }],
    });
    const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
    expect(metadata).toEqual([expect.objectContaining({ key: 'some-other-key', value: { foo: 'bar' } })]);
  });
});
});

View file

@ -0,0 +1,84 @@
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { partner_delete_audit, stack_delete_audit } from 'src/schema/functions';
import { BaseService } from 'src/services/base.service';
import { MediumTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database-level audit behavior: hard-deleting a user must NOT leave rows in
// the *_audit tables for entities removed as a side effect, and asset_exif
// updates must refresh updatedAt/updateId automatically.
describe('audit', () => {
  let ctx: MediumTestContext;
  beforeAll(async () => {
    ctx = new MediumTestContext(BaseService, {
      database: await getKyselyDB(),
      real: [],
      mock: [LoggingRepository],
    });
  });
  describe(partner_delete_audit.name, () => {
    it('should not cascade user deletes to partners_audit', async () => {
      const partnerRepo = ctx.get(PartnerRepository);
      const userRepo = ctx.get(UserRepository);
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });
      // hard delete (second argument true); no partner_audit row may appear
      await userRepo.delete(user1, true);
      await expect(
        ctx.database.selectFrom('partner_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });
  describe(stack_delete_audit.name, () => {
    it('should not cascade user deletes to stacks_audit', async () => {
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
      await userRepo.delete(user, true);
      await expect(
        ctx.database.selectFrom('stack_audit').select(['id']).where('userId', '=', user.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });
  describe('assets_audit', () => {
    it('should not cascade user deletes to assets_audit', async () => {
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await userRepo.delete(user, true);
      await expect(
        ctx.database.selectFrom('asset_audit').select(['id']).where('assetId', '=', asset.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });
  describe('exif', () => {
    it('should automatically set updatedAt and updateId when the row is updated', async () => {
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      // snapshot the tracking columns, upsert again, and compare
      const before = await ctx.database
        .selectFrom('asset_exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();
      await ctx.newExif({ assetId: asset.id, make: 'Canon 2' });
      const after = await ctx.database
        .selectFrom('asset_exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();
      expect(before.updateId).not.toEqual(after.updateId);
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });
});

View file

@ -0,0 +1,66 @@
import { Kysely } from 'kysely';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { AuthAdminService } from 'src/services/auth-admin.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
// Database shared by every test in this file; created once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds an AuthAdminService test harness backed by a real UserRepository
 * and a mocked logger.
 *
 * @param db - optional database override; defaults to the shared instance.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(AuthAdminService, {
    // `??` instead of `||`: fall back only when no database was passed,
    // never on an arbitrary falsy value.
    database: db ?? defaultDatabase,
    real: [UserRepository],
    mock: [LoggingRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(AuthAdminService.name, () => {
  // unlinkAll clears user.oauthId for every user, including soft-deleted ones.
  describe('unlinkAll', () => {
    it('should reset user.oauthId', async () => {
      const { sut, ctx } = setup();
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser({ oauthId: 'test-oauth-id' });
      const auth = factory.auth();
      await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
      await expect(userRepo.get(user.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
    });
    it('should reset a deleted user', async () => {
      const { sut, ctx } = setup();
      const userRepo = ctx.get(UserRepository);
      // soft-deleted users must also be unlinked
      const { user } = await ctx.newUser({ oauthId: 'test-oauth-id', deletedAt: new Date() });
      const auth = factory.auth();
      await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
      await expect(userRepo.get(user.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
    });
    it('should reset multiple users', async () => {
      const { sut, ctx } = setup();
      const userRepo = ctx.get(UserRepository);
      const { user: user1 } = await ctx.newUser({ oauthId: '1' });
      const { user: user2 } = await ctx.newUser({ oauthId: '2', deletedAt: new Date() });
      const auth = factory.auth();
      await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
      await expect(userRepo.get(user1.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
      await expect(userRepo.get(user2.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
    });
  });
});

View file

@ -0,0 +1,166 @@
import { BadRequestException } from '@nestjs/common';
import { hash } from 'bcrypt';
import { Kysely } from 'kysely';
import { AuthType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SessionRepository } from 'src/repositories/session.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { AuthService } from 'src/services/auth.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
// Database shared by every test in this file; created once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds an AuthService harness with real auth-related repositories and
 * mocked logging/storage/event/telemetry dependencies.
 *
 * @param db - optional database override; defaults to the shared instance.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(AuthService, {
    // `??` instead of `||`: fall back only when no database was passed.
    database: db ?? defaultDatabase,
    real: [
      AccessRepository,
      ConfigRepository,
      CryptoRepository,
      DatabaseRepository,
      SessionRepository,
      SystemMetadataRepository,
      UserRepository,
    ],
    mock: [LoggingRepository, StorageRepository, EventRepository, TelemetryRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for AuthService: admin sign-up, password login, logout, and
// password changes, all against a real database and crypto stack.
describe(AuthService.name, () => {
  describe('adminSignUp', () => {
    it(`should sign up the admin`, async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };
      await expect(sut.adminSignUp(dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          email: dto.email,
          name: dto.name,
          isAdmin: true,
        }),
      );
    });
    it('should not allow a second admin to sign up', async () => {
      const { sut, ctx } = setup();
      await ctx.newUser({ isAdmin: true });
      const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };
      // the same rejected promise is asserted twice: once for the exception
      // type, once for the message
      const response = sut.adminSignUp(dto);
      await expect(response).rejects.toThrow(BadRequestException);
      await expect(response).rejects.toThrow('The server already has an admin');
    });
  });
  describe('login', () => {
    it('should reject an incorrect password', async () => {
      const { sut, ctx } = setup();
      const password = 'password';
      // store a bcrypt hash, as the service compares against hashes
      const passwordHashed = await hash(password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const dto = { email: user.email, password: 'wrong-password' };
      await expect(sut.login(dto, mediumFactory.loginDetails())).rejects.toThrow('Incorrect email or password');
    });
    it('should accept a correct password and return a login response', async () => {
      const { sut, ctx } = setup();
      const password = 'password';
      const passwordHashed = await hash(password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const dto = { email: user.email, password };
      // exact shape of the login response (toEqual, not objectContaining)
      await expect(sut.login(dto, mediumFactory.loginDetails())).resolves.toEqual({
        accessToken: expect.any(String),
        isAdmin: user.isAdmin,
        isOnboarded: false,
        name: user.name,
        profileImagePath: user.profileImagePath,
        userId: user.id,
        userEmail: user.email,
        shouldChangePassword: user.shouldChangePassword,
      });
    });
  });
  describe('logout', () => {
    it('should logout', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
    });
    it('should cleanup the session', async () => {
      const { sut, ctx } = setup();
      const sessionRepo = ctx.get(SessionRepository);
      const eventRepo = ctx.getMock(EventRepository);
      const { user } = await ctx.newUser();
      const { session } = await ctx.newSession({ userId: user.id });
      const auth = factory.auth({ session, user });
      eventRepo.emit.mockResolvedValue();
      // the session exists before logout...
      await expect(sessionRepo.get(session.id)).resolves.toEqual(expect.objectContaining({ id: session.id }));
      await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
      // ...and is gone afterwards
      await expect(sessionRepo.get(session.id)).resolves.toBeUndefined();
    });
  });
  describe('changePassword', () => {
    it('should change the password and login with it', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = { password: 'password', newPassword: 'new-password' };
      const passwordHashed = await hash(dto.password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const auth = factory.auth({ user });
      const response = await sut.changePassword(auth, dto);
      expect(response).toEqual(
        expect.objectContaining({
          id: user.id,
          email: user.email,
        }),
      );
      // the password hash must never leak into the response payload
      expect((response as any).password).not.toBeDefined();
      // the new password must immediately work for login
      await expect(
        sut.login({ email: user.email, password: dto.newPassword }, mediumFactory.loginDetails()),
      ).resolves.toBeDefined();
    });
    it('should validate the current password', async () => {
      const { sut, ctx } = setup();
      const dto = { password: 'wrong-password', newPassword: 'new-password' };
      const passwordHashed = await hash('password', 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const auth = factory.auth({ user });
      const response = sut.changePassword(auth, dto);
      await expect(response).rejects.toThrow(BadRequestException);
      await expect(response).rejects.toThrow('Wrong password');
    });
  });
});

View file

@ -0,0 +1,246 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { AssetFileType, MemoryType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { MemoryService } from 'src/services/memory.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
// Database for the current test; recreated in beforeEach inside the describe.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a MemoryService harness with real repositories and a mocked logger.
 *
 * Fixes: the original listed UserRepository twice in `real`; it appears once
 * here. Also uses `??` rather than `||` for the database default.
 *
 * @param db - optional database override; defaults to the shared instance.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(MemoryService, {
    database: db ?? defaultDatabase,
    real: [
      AccessRepository,
      AssetRepository,
      DatabaseRepository,
      MemoryRepository,
      UserRepository,
      SystemMetadataRepository,
      PartnerRepository,
    ],
    mock: [LoggingRepository],
  });
};
// Medium tests for MemoryService: manual memory creation and the scheduled
// "on this day" memory generation/cleanup jobs.
describe(MemoryService.name, () => {
  // a fresh database per test (beforeEach, unlike the beforeAll used by
  // sibling test files) so generated memories do not leak between tests
  beforeEach(async () => {
    defaultDatabase = await getKyselyDB();
  });
  describe('create', () => {
    it('should create a new memory', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
      };
      await expect(sut.create(auth, dto)).resolves.toEqual({
        id: expect.any(String),
        type: dto.type,
        data: dto.data,
        createdAt: expect.any(Date),
        updatedAt: expect.any(Date),
        isSaved: false,
        memoryAt: dto.memoryAt,
        ownerId: user.id,
        assets: [],
      });
    });
    it('should create a new memory (with assets)', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
      const auth = factory.auth({ user });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
        assetIds: [asset1.id, asset2.id],
      };
      await expect(sut.create(auth, dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          assets: [expect.objectContaining({ id: asset1.id }), expect.objectContaining({ id: asset2.id })],
        }),
      );
    });
    it('should create a new memory and ignore assets the user does not have access to', async () => {
      const { sut, ctx } = setup();
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user1.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
      const auth = factory.auth({ user: user1 });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
        assetIds: [asset1.id, asset2.id],
      };
      // user2's asset is silently dropped, not an error
      await expect(sut.create(auth, dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          assets: [expect.objectContaining({ id: asset1.id })],
        }),
      );
    });
  });
  describe('onMemoryCreate', () => {
    it('should work on an empty database', async () => {
      const { sut } = setup();
      await expect(sut.onMemoriesCreate()).resolves.not.toThrow();
    });
    it('should create a memory from an asset', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      // asset dated exactly one year ago relative to the pinned clock
      const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
      // seed exif, job status, and preview/thumbnail files for the asset
      await Promise.all([
        ctx.newExif({ assetId: asset.id, make: 'Canon' }),
        ctx.newJobStatus({ assetId: asset.id }),
        assetRepo.upsertFiles([
          { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
          { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
        ]),
      ]);
      // pin the system clock so "on this day" resolves deterministically
      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();
      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      expect(memories[0]).toEqual(
        expect.objectContaining({
          id: expect.any(String),
          createdAt: expect.any(Date),
          memoryAt: expect.any(Date),
          updatedAt: expect.any(Date),
          deletedAt: null,
          ownerId: user.id,
          assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
          isSaved: false,
          showAt: now.startOf('day').toJSDate(),
          hideAt: now.endOf('day').toJSDate(),
          seenAt: null,
          type: 'on_this_day',
          data: { year: 2024 },
        }),
      );
    });
    it('should create a memory from an asset - in advance', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      // note: one day AFTER the anniversary date used by the previous test
      const now = DateTime.fromObject({ year: 2035, month: 2, day: 26 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
      await Promise.all([
        ctx.newExif({ assetId: asset.id, make: 'Canon' }),
        ctx.newJobStatus({ assetId: asset.id }),
        assetRepo.upsertFiles([
          { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
          { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
        ]),
      ]);
      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();
      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      expect(memories[0]).toEqual(
        expect.objectContaining({
          id: expect.any(String),
          createdAt: expect.any(Date),
          memoryAt: expect.any(Date),
          updatedAt: expect.any(Date),
          deletedAt: null,
          ownerId: user.id,
          assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
          isSaved: false,
          showAt: now.startOf('day').toJSDate(),
          hideAt: now.endOf('day').toJSDate(),
          seenAt: null,
          type: 'on_this_day',
          data: { year: 2034 },
        }),
      );
    });
    it('should not generate a memory twice for the same day', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      // three assets clustered a few days after last year's date
      for (const dto of [
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 3 }).toISO(),
        },
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 4 }).toISO(),
        },
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 5 }).toISO(),
        },
      ]) {
        const { asset } = await ctx.newAsset(dto);
        await Promise.all([
          ctx.newExif({ assetId: asset.id, make: 'Canon' }),
          ctx.newJobStatus({ assetId: asset.id }),
          assetRepo.upsertFiles([
            { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
            { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
          ]),
        ]);
      }
      vi.setSystemTime(now.toJSDate());
      // running the job twice must not duplicate the memory
      await sut.onMemoriesCreate();
      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      await sut.onMemoriesCreate();
      const memoriesAfter = await memoryRepo.search(user.id, {});
      expect(memoriesAfter.length).toBe(1);
    });
  });
  describe('onMemoriesCleanup', () => {
    it('should run without error', async () => {
      const { sut } = setup();
      await expect(sut.onMemoriesCleanup()).resolves.not.toThrow();
    });
  });
});

View file

@ -0,0 +1,108 @@
import { Stats } from 'node:fs';
import { writeFile } from 'node:fs/promises';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MetadataService } from 'src/services/metadata.service';
import { automock, newRandomImage, newTestService, ServiceMocks } from 'test/utils';
// A real MetadataRepository; its logger dependency is an automock built with
// a sparse args array (first constructor arg intentionally omitted).
const metadataRepository = new MetadataRepository(
  // eslint-disable-next-line no-sparse-arrays
  automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }),
);

// Writes a fresh random PNG to the OS temp dir, applies the given tags via
// the metadata repository, and returns the resulting file path.
const createTestFile = async (exifData: Record<string, any>) => {
  const data = newRandomImage();
  const filePath = join(tmpdir(), 'test.png');
  await writeFile(filePath, data);
  await metadataRepository.writeTags(filePath, exifData);
  return { filePath };
};

// One table-driven case for the time-zone extraction tests below.
type TimeZoneTest = {
  description: string;
  serverTimeZone?: string; // value for process.env.TZ; TZ stays unset when omitted
  exifData: Record<string, any>;
  expected: {
    localDateTime: string;
    dateTimeOriginal: string;
    timeZone: string | null;
  };
};
// Tests for MetadataService.handleMetadataExtraction's time-zone handling,
// driven by real EXIF data written to a temp file.
describe(MetadataService.name, () => {
  let sut: MetadataService;
  let mocks: ServiceMocks;
  beforeEach(() => {
    ({ sut, mocks } = newTestService(MetadataService, { metadata: metadataRepository }));
    mocks.storage.stat.mockResolvedValue({
      size: 123_456,
      mtime: new Date(654_321),
      mtimeMs: 654_321,
      birthtimeMs: 654_322,
    } as Stats);
    // start every test with no server time zone configured
    delete process.env.TZ;
  });
  it('should be defined', () => {
    expect(sut).toBeDefined();
  });
  describe('handleMetadataExtraction', () => {
    const timeZoneTests: TimeZoneTest[] = [
      {
        description: 'should handle no time zone information',
        exifData: {
          DateTimeOriginal: '2022:01:01 00:00:00',
        },
        expected: {
          localDateTime: '2022-01-01T00:00:00.000Z',
          dateTimeOriginal: '2022-01-01T00:00:00.000Z',
          timeZone: null,
        },
      },
      {
        description: 'should handle a +13:00 time zone',
        exifData: {
          DateTimeOriginal: '2022:01:01 00:00:00+13:00',
        },
        expected: {
          localDateTime: '2022-01-01T00:00:00.000Z',
          dateTimeOriginal: '2021-12-31T11:00:00.000Z',
          timeZone: 'UTC+13',
        },
      },
    ];
    it.each(timeZoneTests)('$description', async ({ exifData, serverTimeZone, expected }) => {
      // BUG FIX: the original `process.env.TZ = serverTimeZone ?? undefined`
      // coerces undefined to the string 'undefined' (Node stringifies env
      // values on assignment), leaving an invalid TZ instead of an unset
      // one. Only assign when a time zone was provided; beforeEach already
      // removed any previous value.
      if (serverTimeZone !== undefined) {
        process.env.TZ = serverTimeZone;
      }
      const { filePath } = await createTestFile(exifData);
      mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
        id: 'asset-1',
        originalPath: filePath,
        files: [],
      } as any);
      await sut.handleMetadataExtraction({ id: 'asset-1' });
      expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
        expect.objectContaining({
          dateTimeOriginal: new Date(expected.dateTimeOriginal),
          timeZone: expected.timeZone,
        }),
        { lockedPropertiesBehavior: 'skip' },
      );
      expect(mocks.asset.update).toHaveBeenCalledWith(
        expect.objectContaining({
          localDateTime: new Date(expected.localDateTime),
        }),
      );
    });
  });
});

View file

@ -0,0 +1,249 @@
import { Kysely } from 'kysely';
import { AssetFileType, JobStatus } from 'src/enum';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { DB } from 'src/schema';
import { OcrService } from 'src/services/ocr.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database shared by every test in this file; created once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds an OcrService harness with real asset/config/ocr repositories and
 * mocked job, logging, and machine-learning dependencies.
 *
 * @param db - optional database override; defaults to the shared instance.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(OcrService, {
    // `??` instead of `||`: fall back only when no database was passed.
    database: db ?? defaultDatabase,
    real: [AssetRepository, AssetJobRepository, ConfigRepository, OcrRepository, SystemMetadataRepository],
    mock: [JobRepository, LoggingRepository, MachineLearningRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for OcrService.handleOcr: persisting ML-detected text boxes,
// maintaining the ocr_search row, and stamping asset_job_status.ocrAt.
describe(OcrService.name, () => {
  it('should work', () => {
    const { sut } = setup();
    expect(sut).toBeDefined();
  });
  it('should parse asset', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    // one detected box: 8 flat coordinates (x1,y1 .. x4,y4)
    machineLearningMock.ocr.mockResolvedValue({
      box: [10, 10, 50, 10, 50, 50, 10, 50],
      boxScore: [0.99],
      text: ['Test OCR'],
      textScore: [0.95],
    });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
      {
        assetId: asset.id,
        boxScore: 0.99,
        id: expect.any(String),
        text: 'Test OCR',
        textScore: 0.95,
        isVisible: true,
        x1: 10,
        y1: 10,
        x2: 50,
        y2: 10,
        x3: 50,
        y3: 50,
        x4: 10,
        y4: 50,
      },
    ]);
    // the search table holds the concatenated text for the asset
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toEqual({
      assetId: asset.id,
      text: 'Test OCR',
    });
    // job status records when OCR last ran
    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });
  it('should handle multiple boxes', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    // five boxes, coordinates 0..39 (8 values per box)
    machineLearningMock.ocr.mockResolvedValue({
      box: Array.from({ length: 8 * 5 }, (_, i) => i),
      boxScore: [0.7, 0.67, 0.65, 0.62, 0.6],
      text: ['One', 'Two', 'Three', 'Four', 'Five'],
      textScore: [0.9, 0.89, 0.88, 0.87, 0.86],
    });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
      {
        assetId: asset.id,
        boxScore: 0.7,
        id: expect.any(String),
        text: 'One',
        textScore: 0.9,
        isVisible: true,
        x1: 0,
        y1: 1,
        x2: 2,
        y2: 3,
        x3: 4,
        y3: 5,
        x4: 6,
        y4: 7,
      },
      {
        assetId: asset.id,
        boxScore: 0.67,
        id: expect.any(String),
        text: 'Two',
        textScore: 0.89,
        isVisible: true,
        x1: 8,
        y1: 9,
        x2: 10,
        y2: 11,
        x3: 12,
        y3: 13,
        x4: 14,
        y4: 15,
      },
      {
        assetId: asset.id,
        boxScore: 0.65,
        id: expect.any(String),
        text: 'Three',
        textScore: 0.88,
        isVisible: true,
        x1: 16,
        y1: 17,
        x2: 18,
        y2: 19,
        x3: 20,
        y3: 21,
        x4: 22,
        y4: 23,
      },
      {
        assetId: asset.id,
        boxScore: 0.62,
        id: expect.any(String),
        text: 'Four',
        textScore: 0.87,
        isVisible: true,
        x1: 24,
        y1: 25,
        x2: 26,
        y2: 27,
        x3: 28,
        y3: 29,
        x4: 30,
        y4: 31,
      },
      {
        assetId: asset.id,
        boxScore: 0.6,
        id: expect.any(String),
        text: 'Five',
        textScore: 0.86,
        isVisible: true,
        x1: 32,
        y1: 33,
        x2: 34,
        y2: 35,
        x3: 36,
        y3: 37,
        x4: 38,
        y4: 39,
      },
    ]);
    // search text is the space-joined concatenation of all box texts
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toEqual({
      assetId: asset.id,
      text: 'One Two Three Four Five',
    });
    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });
  it('should handle no boxes', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
    // no ocr_search row, but ocrAt is still stamped
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toBeUndefined();
    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });
  it('should update existing results', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({
      box: [10, 10, 50, 10, 50, 50, 10, 50],
      boxScore: [0.99],
      text: ['Test OCR'],
      textScore: [0.95],
    });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
    // a second run with no detections must replace (clear) the earlier rows
    machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toBeUndefined();
  });
});

View file

@ -0,0 +1,80 @@
import { Kysely } from 'kysely';
import { AccessRepository } from 'src/repositories/access.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { DB } from 'src/schema';
import { PersonService } from 'src/services/person.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
// Database shared by every test in this file; created once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a PersonService harness with real access/database/person
 * repositories and mocked logging/storage.
 *
 * @param db - optional database override; defaults to the shared instance.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(PersonService, {
    // `??` instead of `||`: fall back only when no database was passed.
    database: db ?? defaultDatabase,
    real: [AccessRepository, DatabaseRepository, PersonRepository],
    mock: [LoggingRepository, StorageRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for PersonService delete/deleteAll: access checks, row
// removal, and thumbnail cleanup via the storage repository.
describe(PersonService.name, () => {
  describe('delete', () => {
    it('should throw an error when there is no access', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const personId = factory.uuid();
      await expect(sut.delete(auth, personId)).rejects.toThrow('Not found or no person.delete access');
    });
    it('should delete the person', async () => {
      const { sut, ctx } = setup();
      const personRepo = ctx.get(PersonRepository);
      const storageMock = ctx.getMock(StorageRepository);
      const { user } = await ctx.newUser();
      const { person } = await ctx.newPerson({ ownerId: user.id });
      const auth = factory.auth({ user });
      storageMock.unlink.mockResolvedValue();
      // sanity check that the person exists before deleting
      await expect(personRepo.getById(person.id)).resolves.toEqual(expect.objectContaining({ id: person.id }));
      await expect(sut.delete(auth, person.id)).resolves.toBeUndefined();
      await expect(personRepo.getById(person.id)).resolves.toBeUndefined();
      // the face thumbnail file is removed as part of the delete
      expect(storageMock.unlink).toHaveBeenCalledWith(person.thumbnailPath);
    });
  });
  describe('deleteAll', () => {
    it('should throw an error when there is no access', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const personId = factory.uuid();
      await expect(sut.deleteAll(auth, { ids: [personId] })).rejects.toThrow('Not found or no person.delete access');
    });
    it('should delete the person', async () => {
      const { sut, ctx } = setup();
      const storageMock = ctx.getMock(StorageRepository);
      const personRepo = ctx.get(PersonRepository);
      const { user } = await ctx.newUser();
      const { person: person1 } = await ctx.newPerson({ ownerId: user.id });
      const { person: person2 } = await ctx.newPerson({ ownerId: user.id });
      const auth = factory.auth({ user });
      storageMock.unlink.mockResolvedValue();
      await expect(sut.deleteAll(auth, { ids: [person1.id, person2.id] })).resolves.toBeUndefined();
      await expect(personRepo.getById(person1.id)).resolves.toBeUndefined();
      await expect(personRepo.getById(person2.id)).resolves.toBeUndefined();
      // one unlink per deleted person's thumbnail
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(person1.thumbnailPath);
      expect(storageMock.unlink).toHaveBeenCalledWith(person2.thumbnailPath);
    });
  });
});

View file

@ -0,0 +1,308 @@
import { Kysely } from 'kysely';
import { PluginContext } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PluginRepository } from 'src/repositories/plugin.repository';
import { DB } from 'src/schema';
import { PluginService } from 'src/services/plugin.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
let pluginRepo: PluginRepository;
/**
 * Builds a PluginService backed by real plugin/access repositories against a
 * live test database; logging is mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(PluginService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [PluginRepository, AccessRepository],
    mock: [LoggingRepository],
  });
};
// One shared database connection, plus a repository handle used by the tests
// to seed plugins directly.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
  pluginRepo = new PluginRepository(defaultDatabase);
});
// Reset plugin state between tests so fixtures don't leak across cases.
afterEach(async () => {
  await defaultDatabase.deleteFrom('plugin').execute();
});
// Medium tests for PluginService read APIs: listing all plugins and fetching
// a single plugin, with their associated filters and actions. Fixtures are
// seeded directly through PluginRepository.loadPlugin.
describe(PluginService.name, () => {
  describe('getAll', () => {
    it('should return empty array when no plugins exist', async () => {
      const { sut } = setup();
      const plugins = await sut.getAll();
      expect(plugins).toEqual([]);
    });
    it('should return plugin without filters and actions', async () => {
      const { sut } = setup();
      const result = await pluginRepo.loadPlugin(
        {
          name: 'test-plugin',
          title: 'Test Plugin',
          description: 'A test plugin',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/test.wasm' },
        },
        '/test/base/path',
      );
      const plugins = await sut.getAll();
      expect(plugins).toHaveLength(1);
      // With no filters/actions in the manifest, both collections come back empty.
      expect(plugins[0]).toMatchObject({
        id: result.plugin.id,
        name: 'test-plugin',
        description: 'A test plugin',
        author: 'Test Author',
        version: '1.0.0',
        filters: [],
        actions: [],
      });
    });
    it('should return plugin with filters and actions', async () => {
      const { sut } = setup();
      const result = await pluginRepo.loadPlugin(
        {
          name: 'full-plugin',
          title: 'Full Plugin',
          description: 'A plugin with filters and actions',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/full.wasm' },
          filters: [
            {
              methodName: 'test-filter',
              title: 'Test Filter',
              description: 'A test filter',
              supportedContexts: [PluginContext.Asset],
              schema: { type: 'object', properties: {} },
            },
          ],
          actions: [
            {
              methodName: 'test-action',
              title: 'Test Action',
              description: 'A test action',
              supportedContexts: [PluginContext.Asset],
              schema: { type: 'object', properties: {} },
            },
          ],
        },
        '/test/base/path',
      );
      const plugins = await sut.getAll();
      expect(plugins).toHaveLength(1);
      // Filters and actions are returned in full, linked back to the plugin id.
      expect(plugins[0]).toMatchObject({
        id: result.plugin.id,
        name: 'full-plugin',
        filters: [
          {
            id: result.filters[0].id,
            pluginId: result.plugin.id,
            methodName: 'test-filter',
            title: 'Test Filter',
            description: 'A test filter',
            supportedContexts: [PluginContext.Asset],
            schema: { type: 'object', properties: {} },
          },
        ],
        actions: [
          {
            id: result.actions[0].id,
            pluginId: result.plugin.id,
            methodName: 'test-action',
            title: 'Test Action',
            description: 'A test action',
            supportedContexts: [PluginContext.Asset],
            schema: { type: 'object', properties: {} },
          },
        ],
      });
    });
    it('should return multiple plugins with their respective filters and actions', async () => {
      const { sut } = setup();
      // plugin-1 has one filter and no actions; plugin-2 the opposite.
      await pluginRepo.loadPlugin(
        {
          name: 'plugin-1',
          title: 'Plugin 1',
          description: 'First plugin',
          author: 'Author 1',
          version: '1.0.0',
          wasm: { path: '/path/to/plugin1.wasm' },
          filters: [
            {
              methodName: 'filter-1',
              title: 'Filter 1',
              description: 'Filter for plugin 1',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );
      await pluginRepo.loadPlugin(
        {
          name: 'plugin-2',
          title: 'Plugin 2',
          description: 'Second plugin',
          author: 'Author 2',
          version: '2.0.0',
          wasm: { path: '/path/to/plugin2.wasm' },
          actions: [
            {
              methodName: 'action-2',
              title: 'Action 2',
              description: 'Action for plugin 2',
              supportedContexts: [PluginContext.Album],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );
      const plugins = await sut.getAll();
      expect(plugins).toHaveLength(2);
      // Each plugin keeps only its own filters/actions — no cross-contamination.
      expect(plugins[0].name).toBe('plugin-1');
      expect(plugins[0].filters).toHaveLength(1);
      expect(plugins[0].actions).toHaveLength(0);
      expect(plugins[1].name).toBe('plugin-2');
      expect(plugins[1].filters).toHaveLength(0);
      expect(plugins[1].actions).toHaveLength(1);
    });
    it('should handle plugin with multiple filters and actions', async () => {
      const { sut } = setup();
      await pluginRepo.loadPlugin(
        {
          name: 'multi-plugin',
          title: 'Multi Plugin',
          description: 'Plugin with multiple items',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/multi.wasm' },
          filters: [
            {
              methodName: 'filter-a',
              title: 'Filter A',
              description: 'First filter',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
            {
              methodName: 'filter-b',
              title: 'Filter B',
              description: 'Second filter',
              supportedContexts: [PluginContext.Album],
              schema: undefined,
            },
          ],
          actions: [
            {
              methodName: 'action-x',
              title: 'Action X',
              description: 'First action',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
            {
              methodName: 'action-y',
              title: 'Action Y',
              description: 'Second action',
              supportedContexts: [PluginContext.Person],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );
      const plugins = await sut.getAll();
      expect(plugins).toHaveLength(1);
      expect(plugins[0].filters).toHaveLength(2);
      expect(plugins[0].actions).toHaveLength(2);
    });
  });
  describe('get', () => {
    it('should throw error when plugin does not exist', async () => {
      const { sut } = setup();
      // Well-formed but unknown uuid — lookup must fail, not return null.
      await expect(sut.get('00000000-0000-0000-0000-000000000000')).rejects.toThrow('Plugin not found');
    });
    it('should return single plugin with filters and actions', async () => {
      const { sut } = setup();
      const result = await pluginRepo.loadPlugin(
        {
          name: 'single-plugin',
          title: 'Single Plugin',
          description: 'A single plugin',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/single.wasm' },
          filters: [
            {
              methodName: 'single-filter',
              title: 'Single Filter',
              description: 'A single filter',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
          ],
          actions: [
            {
              methodName: 'single-action',
              title: 'Single Action',
              description: 'A single action',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );
      const pluginResult = await sut.get(result.plugin.id);
      expect(pluginResult).toMatchObject({
        id: result.plugin.id,
        name: 'single-plugin',
        filters: [
          {
            id: result.filters[0].id,
            methodName: 'single-filter',
            title: 'Single Filter',
          },
        ],
        actions: [
          {
            id: result.actions[0].id,
            methodName: 'single-action',
            title: 'Single Action',
          },
        ],
      });
    });
  });
});

View file

@ -0,0 +1,91 @@
import { Kysely } from 'kysely';
import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { SearchRepository } from 'src/repositories/search.repository';
import { DB } from 'src/schema';
import { SearchService } from 'src/services/search.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
/**
 * Builds a SearchService with real access/asset/database/search/partner/person
 * repositories against a live test database; logging is mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(SearchService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [
      AccessRepository,
      AssetRepository,
      DatabaseRepository,
      SearchRepository,
      PartnerRepository,
      PersonRepository,
    ],
    mock: [LoggingRepository],
  });
};
// One shared database connection for every test in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for SearchService: large-asset search ordering and
// person-filtered search statistics.
describe(SearchService.name, () => {
  it('should work', () => {
    const { sut } = setup();
    expect(sut).toBeDefined();
  });
  it('should return assets', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const assets = [];
    // Deliberately unsorted sizes; results must come back largest-first.
    const sizes = [12_334, 599, 123_456];
    // for..of instead of an index loop — the index was only used to read sizes[i].
    for (const fileSizeInByte of sizes) {
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newExif({ assetId: asset.id, fileSizeInByte });
      assets.push(asset);
    }
    const auth = factory.auth({ user: { id: user.id } });
    // Expected order by descending file size: 123_456, 12_334, 599.
    await expect(sut.searchLargeAssets(auth, {})).resolves.toEqual([
      expect.objectContaining({ id: assets[2].id }),
      expect.objectContaining({ id: assets[0].id }),
      expect.objectContaining({ id: assets[1].id }),
    ]);
  });
  describe('searchStatistics', () => {
    it('should return statistics when filtering by personIds', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const { person } = await ctx.newPerson({ ownerId: user.id });
      // Link the asset to the person through a face so the filter matches it.
      await ctx.newAssetFace({ assetId: asset.id, personId: person.id });
      const auth = factory.auth({ user: { id: user.id } });
      const result = await sut.searchStatistics(auth, { personIds: [person.id] });
      expect(result).toEqual({ total: 1 });
    });
    it('should return zero when no assets match the personIds filter', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { person } = await ctx.newPerson({ ownerId: user.id });
      const auth = factory.auth({ user: { id: user.id } });
      const result = await sut.searchStatistics(auth, { personIds: [person.id] });
      expect(result).toEqual({ total: 0 });
    });
  });
});

View file

@ -0,0 +1,127 @@
import { Kysely } from 'kysely';
import { randomBytes } from 'node:crypto';
import { SharedLinkType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { DB } from 'src/schema';
import { SharedLinkService } from 'src/services/shared-link.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
/**
 * Builds a SharedLinkService with real access/database/shared-link
 * repositories against a live test database; logging and storage are mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(SharedLinkService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [AccessRepository, DatabaseRepository, SharedLinkRepository, SharedLinkAssetRepository],
    mock: [LoggingRepository, StorageRepository],
  });
};
// One shared database connection for every test in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for SharedLinkService: album shared links (date range metadata)
// and individually-shared assets (listing and removal).
describe(SharedLinkService.name, () => {
  describe('get', () => {
    it('should return the correct dates on the shared link album', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { album } = await ctx.newAlbum({ ownerId: user.id });
      // Out-of-order dates: the album must report min as start and max as end.
      const dates = ['2021-01-01T00:00:00.000Z', '2022-01-01T00:00:00.000Z', '2020-01-01T00:00:00.000Z'];
      for (const date of dates) {
        const { asset } = await ctx.newAsset({ fileCreatedAt: date, localDateTime: date, ownerId: user.id });
        await ctx.newExif({ assetId: asset.id, make: 'Canon' });
        await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
      }
      const sharedLinkRepo = ctx.get(SharedLinkRepository);
      const sharedLink = await sharedLinkRepo.create({
        key: randomBytes(16),
        id: factory.uuid(),
        userId: user.id,
        albumId: album.id,
        allowUpload: true,
        type: SharedLinkType.Album,
      });
      await expect(sut.get(auth, sharedLink.id)).resolves.toMatchObject({
        album: expect.objectContaining({
          startDate: '2020-01-01T00:00:00+00:00',
          endDate: '2022-01-01T00:00:00+00:00',
        }),
      });
    });
  });
  // NOTE(review): the two tests below sit directly under the top-level describe
  // rather than inside a nested describe like 'get' above — consider grouping.
  it('should share individually assets', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const assets = await Promise.all([
      ctx.newAsset({ ownerId: user.id }),
      ctx.newAsset({ ownerId: user.id }),
      ctx.newAsset({ ownerId: user.id }),
    ]);
    for (const { asset } of assets) {
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    }
    const sharedLinkRepo = ctx.get(SharedLinkRepository);
    const sharedLink = await sharedLinkRepo.create({
      key: randomBytes(16),
      id: factory.uuid(),
      userId: user.id,
      allowUpload: false,
      type: SharedLinkType.Individual,
      assetIds: assets.map(({ asset }) => asset.id),
    });
    // getMine is called with an auth that carries the shared link itself.
    await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
      assets: assets.map(({ asset }) => expect.objectContaining({ id: asset.id })),
    });
  });
  it('should remove individually shared asset', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const sharedLinkRepo = ctx.get(SharedLinkRepository);
    const sharedLink = await sharedLinkRepo.create({
      key: randomBytes(16),
      id: factory.uuid(),
      userId: user.id,
      allowUpload: false,
      type: SharedLinkType.Individual,
      assetIds: [asset.id],
    });
    // Asset is visible before removal...
    await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
      assets: [expect.objectContaining({ id: asset.id })],
    });
    await sut.removeAssets(auth, sharedLink.id, {
      assetIds: [asset.id],
    });
    // ...and gone afterwards.
    await expect(sut.getMine({ user, sharedLink }, {})).resolves.toHaveProperty('assets', []);
  });
});

View file

@ -0,0 +1,46 @@
import { Kysely } from 'kysely';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { StorageRepository } from 'src/repositories/storage.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { DB } from 'src/schema';
import { StorageService } from 'src/services/storage.service';
import { newMediumService } from 'test/medium.factory';
import { mockEnvData } from 'test/repositories/config.repository.mock';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
/**
 * Builds a StorageService with real asset/database/system-metadata
 * repositories against a live test database; storage, config, and logging
 * are mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(StorageService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [AssetRepository, DatabaseRepository, SystemMetadataRepository],
    mock: [StorageRepository, ConfigRepository, LoggingRepository],
  });
};
// One shared database connection for every test in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium test for StorageService startup: with a mocked filesystem that
// reports existing dirs and readable files, bootstrap should complete cleanly.
describe(StorageService.name, () => {
  // Fixed typo: label was 'onBoostrap' but the method under test is onBootstrap.
  describe('onBootstrap', () => {
    it('should work', async () => {
      const { sut, ctx } = setup();
      const configMock = ctx.getMock(ConfigRepository);
      configMock.getEnv.mockReturnValue(mockEnvData({}));
      const storageMock = ctx.getMock(StorageRepository);
      // Simulate a healthy storage layer: dirs exist, files read/write fine.
      storageMock.mkdirSync.mockReturnValue(void 0);
      storageMock.existsSync.mockReturnValue(true);
      storageMock.createFile.mockResolvedValue(void 0);
      storageMock.overwriteFile.mockResolvedValue(void 0);
      storageMock.readFile.mockResolvedValue(Buffer.from('test content'));
      await expect(sut.onBootstrap()).resolves.toBeUndefined();
    });
  });
});

View file

@ -0,0 +1,226 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { AssetMetadataKey, UserMetadataKey } from 'src/enum';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SyncRepository } from 'src/repositories/sync.repository';
import { DB } from 'src/schema';
import { SyncService } from 'src/services/sync.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
import { v4 } from 'uuid';
let defaultDatabase: Kysely<DB>;
/**
 * Builds a SyncService with real database/sync repositories against a live
 * test database; logging is mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(SyncService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [DatabaseRepository, SyncRepository],
    mock: [LoggingRepository],
  });
};
// One shared database connection for every test in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Deletion timestamp safely past the audit retention cutoff — the 'should skip
// recent records' test shows the cutoff lies between 25 and 35 days.
const deletedLongAgo = DateTime.now().minus({ days: 35 }).toISO();
/** Asserts that audit table `t` currently holds exactly `count` rows. */
const assertTableCount = async <T extends keyof DB>(db: Kysely<DB>, t: T, count: number) => {
  // db.dynamic lets us query a table chosen at runtime by its name.
  const { table } = db.dynamic;
  const results = await db.selectFrom(table(t).as(t)).selectAll().execute();
  expect(results).toHaveLength(count);
};
// Medium tests for SyncService.onAuditTableCleanup: every *_audit table is
// seeded with a row deleted long ago and must be emptied by the cleanup job,
// while recent rows are retained.
describe(SyncService.name, () => {
  describe('onAuditTableCleanup', () => {
    it('should work', async () => {
      const { sut } = setup();
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
    });
    it('should cleanup the album_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the album_asset_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_asset_audit';
      // This audit table has a FK on albumId, so a real album is required.
      const { user } = await ctx.newUser();
      const { album } = await ctx.newAlbum({ ownerId: user.id });
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: album.id, assetId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the album_user_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_user_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the asset_audit table', async () => {
      const { sut, ctx } = setup();
      // NOTE(review): this test inlines 'asset_audit' instead of using the
      // tableName-variable convention of its siblings — consider aligning.
      await ctx.database
        .insertInto('asset_audit')
        .values({ assetId: v4(), ownerId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, 'asset_audit', 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, 'asset_audit', 0);
    });
    it('should cleanup the asset_face_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'asset_face_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ assetFaceId: v4(), assetId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the asset_metadata_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'asset_metadata_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ assetId: v4(), key: AssetMetadataKey.MobileApp, deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the memory_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'memory_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ memoryId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the memory_asset_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'memory_asset_audit';
      // This audit table has a FK on memoryId, so a real memory is required.
      const { user } = await ctx.newUser();
      const { memory } = await ctx.newMemory({ ownerId: user.id });
      await ctx.database
        .insertInto(tableName)
        .values({ memoryId: memory.id, assetId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the partner_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'partner_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ sharedById: v4(), sharedWithId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the stack_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'stack_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ stackId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the user_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'user_audit';
      await ctx.database.insertInto(tableName).values({ userId: v4(), deletedAt: deletedLongAgo }).execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should cleanup the user_metadata_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'user_metadata_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ userId: v4(), key: UserMetadataKey.Onboarding, deletedAt: deletedLongAgo })
        .execute();
      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });
    it('should skip recent records', async () => {
      const { sut, ctx } = setup();
      // 25 days old: inside the retention window, must survive cleanup.
      const keep = {
        id: v4(),
        assetId: v4(),
        ownerId: v4(),
        deletedAt: DateTime.now().minus({ days: 25 }).toISO(),
      };
      // 35 days old: outside the retention window, must be removed.
      const remove = {
        id: v4(),
        assetId: v4(),
        ownerId: v4(),
        deletedAt: DateTime.now().minus({ days: 35 }).toISO(),
      };
      await ctx.database.insertInto('asset_audit').values([keep, remove]).execute();
      await assertTableCount(ctx.database, 'asset_audit', 2);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      const after = await ctx.database.selectFrom('asset_audit').select(['id']).execute();
      expect(after).toHaveLength(1);
      expect(after[0].id).toBe(keep.id);
    });
  });
});

View file

@ -0,0 +1,145 @@
import { Kysely } from 'kysely';
import { JobStatus } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { TagRepository } from 'src/repositories/tag.repository';
import { DB } from 'src/schema';
import { TagService } from 'src/services/tag.service';
import { upsertTags } from 'src/utils/tag';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
/**
 * Builds a TagService with real asset/tag/access repositories against a live
 * test database; events and logging are mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(TagService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [AssetRepository, TagRepository, AccessRepository],
    mock: [EventRepository, LoggingRepository],
  });
};
// One shared database connection for every test in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for TagService: tagging assets (including the exif lock side
// effect) and the empty-tag cleanup job, with flat and hierarchical tags.
describe(TagService.name, () => {
  describe('addAssets', () => {
    it('should lock exif column', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const [tag] = await upsertTags(ctx.get(TagRepository), { userId: user.id, tags: ['tag-1'] });
      const authDto = factory.auth({ user });
      await sut.addAssets(authDto, tag.id, { ids: [asset.id] });
      // Adding a tag writes it into asset_exif.tags and locks that property.
      await expect(
        ctx.database
          .selectFrom('asset_exif')
          .select(['lockedProperties', 'tags'])
          .where('assetId', '=', asset.id)
          .executeTakeFirstOrThrow(),
      ).resolves.toEqual({
        lockedProperties: ['tags'],
        tags: ['tag-1'],
      });
      await expect(ctx.get(TagRepository).getByValue(user.id, 'tag-1')).resolves.toEqual(
        expect.objectContaining({ id: tag.id }),
      );
      await expect(ctx.get(TagRepository).getAssetIds(tag.id, [asset.id])).resolves.toContain(asset.id);
    });
  });
  describe('deleteEmptyTags', () => {
    it('single tag exists, not connected to any assets, and is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const tagRepo = ctx.get(TagRepository);
      const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toBeUndefined();
    });
    it('single tag exists, connected to one asset, and is not deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });
      await ctx.newTagAsset({ tagIds: [tag.id], assetIds: [asset.id] });
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
    });
    it('hierarchical tag exists, and the parent is connected to an asset, and the child is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });
      // Only the parent is attached to an asset; the unused child gets pruned.
      await ctx.newTagAsset({ tagIds: [parentTag.id], assetIds: [asset.id] });
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
    });
    it('hierarchical tag exists, and only the child is connected to an asset, and nothing is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });
      // A used child keeps its parent alive too.
      await ctx.newTagAsset({ tagIds: [childTag.id], assetIds: [asset.id] });
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
    });
    it('hierarchical tag exists, and neither parent nor child is connected to an asset, and both are deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toBeUndefined();
    });
  });
});

View file

@ -0,0 +1,209 @@
import { BadRequestException } from '@nestjs/common';
import { Kysely } from 'kysely';
import { AssetVisibility } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { DB } from 'src/schema';
import { TimelineService } from 'src/services/timeline.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
/**
 * Builds a TimelineService with real asset/access/partner repositories
 * against a live test database; logging is mocked.
 * @param db - optional database override; defaults to the file-shared test DB.
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(TimelineService, {
    // ?? rather than ||: fall back only when no database was supplied.
    database: db ?? defaultDatabase,
    real: [AssetRepository, AccessRepository, PartnerRepository],
    mock: [LoggingRepository],
  });
};
// One shared database connection for every test in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for TimelineService: time-bucket listing (getTimeBuckets) and
// bucket content retrieval (getTimeBucket) against a real database.
describe(TimelineService.name, () => {
  describe('getTimeBuckets', () => {
    it('should get time buckets by month', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      // Three assets land in February 1970, one in January.
      const dates = [new Date('1970-01-01'), new Date('1970-02-10'), new Date('1970-02-11'), new Date('1970-02-11')];
      for (const localDateTime of dates) {
        const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime });
        await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      }
      const response = sut.getTimeBuckets(auth, {});
      // Buckets are keyed by the first day of the month, newest first.
      await expect(response).resolves.toEqual([
        { count: 3, timeBucket: '1970-02-01' },
        { count: 1, timeBucket: '1970-01-01' },
      ]);
    });
    it('should return error if time bucket is requested with partners asset and archived', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const response1 = sut.getTimeBuckets(auth, { withPartners: true, visibility: AssetVisibility.Archive });
      await expect(response1).rejects.toBeInstanceOf(BadRequestException);
      await expect(response1).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
      // NOTE(review): withPartners with no visibility at all is also rejected —
      // presumably the service requires visibility to be explicitly timeline; confirm.
      const response2 = sut.getTimeBuckets(auth, { withPartners: true });
      await expect(response2).rejects.toBeInstanceOf(BadRequestException);
      await expect(response2).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
    });
    it('should return error if time bucket is requested with partners asset and favorite', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      // Specifying isFavorite at all (true or false) is rejected with withPartners.
      const response1 = sut.getTimeBuckets(auth, { withPartners: true, isFavorite: false });
      await expect(response1).rejects.toBeInstanceOf(BadRequestException);
      await expect(response1).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
      const response2 = sut.getTimeBuckets(auth, { withPartners: true, isFavorite: true });
      await expect(response2).rejects.toBeInstanceOf(BadRequestException);
      await expect(response2).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
    });
    it('should return error if time bucket is requested with partners asset and trash', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const response = sut.getTimeBuckets(auth, { withPartners: true, isTrashed: true });
      await expect(response).rejects.toBeInstanceOf(BadRequestException);
      await expect(response).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
    });
    it('should not allow access for unrelated shared links', async () => {
      const { sut } = setup();
      // A shared-link session without timeline access must not see buckets.
      const auth = factory.auth({ sharedLink: {} });
      const response = sut.getTimeBuckets(auth, {});
      await expect(response).rejects.toBeInstanceOf(BadRequestException);
      await expect(response).rejects.toThrow('Not found or no timeline.read access');
    });
  });
  describe('getTimeBucket', () => {
    // NOTE(review): this test is byte-identical to 'should return time bucket in
    // trash' below; it was probably meant to cover the non-trashed case — confirm.
    it('should return time bucket', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({
        ownerId: user.id,
        localDateTime: new Date('1970-02-12'),
        deletedAt: new Date(),
      });
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      const auth = factory.auth({ user: { id: user.id } });
      const rawResponse = await sut.getTimeBucket(auth, { timeBucket: '1970-02-01', isTrashed: true });
      // getTimeBucket returns a JSON string of parallel column arrays.
      const response = JSON.parse(rawResponse);
      expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
    });
    it('should handle a bucket without any assets', async () => {
      const { sut } = setup();
      const rawResponse = await sut.getTimeBucket(factory.auth(), { timeBucket: '1970-02-01' });
      const response = JSON.parse(rawResponse);
      // An empty bucket still contains every column, each as an empty array.
      expect(response).toEqual({
        city: [],
        country: [],
        duration: [],
        id: [],
        visibility: [],
        isFavorite: [],
        isImage: [],
        isTrashed: [],
        livePhotoVideoId: [],
        fileCreatedAt: [],
        localOffsetHours: [],
        ownerId: [],
        projectionType: [],
        ratio: [],
        status: [],
        thumbhash: [],
      });
    });
    it('should handle 5 digit years', async () => {
      const { sut } = setup();
      const rawResponse = await sut.getTimeBucket(factory.auth(), { timeBucket: '012345-01-01' });
      const response = JSON.parse(rawResponse);
      expect(response).toEqual(expect.objectContaining({ id: [] }));
    });
    it('should return time bucket in trash', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({
        ownerId: user.id,
        localDateTime: new Date('1970-02-12'),
        deletedAt: new Date(),
      });
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      const auth = factory.auth({ user: { id: user.id } });
      const rawResponse = await sut.getTimeBucket(auth, { timeBucket: '1970-02-01', isTrashed: true });
      const response = JSON.parse(rawResponse);
      expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
    });
    it('should return false for favorite status unless asset owner', async () => {
      const { sut, ctx } = setup();
      // Two users, each owning one favorited asset on a different day.
      const [{ asset: asset1 }, { asset: asset2 }] = await Promise.all([
        ctx.newUser().then(async ({ user }) => {
          const result = await ctx.newAsset({
            ownerId: user.id,
            fileCreatedAt: new Date('1970-02-12'),
            localDateTime: new Date('1970-02-12'),
            isFavorite: true,
          });
          await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
          return result;
        }),
        ctx.newUser().then(async ({ user }) => {
          const result = await ctx.newAsset({
            ownerId: user.id,
            fileCreatedAt: new Date('1970-02-13'),
            localDateTime: new Date('1970-02-13'),
            isFavorite: true,
          });
          await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
          return result;
        }),
      ]);
      // Make the two users mutual partners so each sees the other's assets.
      await Promise.all([
        ctx.newPartner({ sharedById: asset1.ownerId, sharedWithId: asset2.ownerId }),
        ctx.newPartner({ sharedById: asset2.ownerId, sharedWithId: asset1.ownerId }),
      ]);
      const auth1 = factory.auth({ user: { id: asset1.ownerId } });
      const rawResponse1 = await sut.getTimeBucket(auth1, {
        timeBucket: '1970-02-01',
        withPartners: true,
        visibility: AssetVisibility.Timeline,
      });
      const response1 = JSON.parse(rawResponse1);
      // Favorite flag is only reported truthfully for the requester's own asset.
      expect(response1).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [false, true] }));
      const auth2 = factory.auth({ user: { id: asset2.ownerId } });
      const rawResponse2 = await sut.getTimeBucket(auth2, {
        timeBucket: '1970-02-01',
        withPartners: true,
        visibility: AssetVisibility.Timeline,
      });
      const response2 = JSON.parse(rawResponse2);
      expect(response2).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [true, false] }));
    });
  });
});

View file

@ -0,0 +1,181 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { ImmichEnvironment, JobName, JobStatus } from 'src/enum';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { UserService } from 'src/services/user.service';
import { mediumFactory, newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;

/**
 * Builds a UserService backed by real crypto/config/metadata/user repositories;
 * logging, jobs, and events are mocked.
 * Side effect: forces IMMICH_ENV to the testing environment on every call.
 * @param db optional database override; defaults to the shared test connection
 */
const setup = (db?: Kysely<DB>) => {
  process.env.IMMICH_ENV = ImmichEnvironment.Testing;
  return newMediumService(UserService, {
    // `??` rather than `||`: only substitute the default when no db was passed.
    database: db ?? defaultDatabase,
    real: [CryptoRepository, ConfigRepository, SystemMetadataRepository, UserRepository],
    mock: [LoggingRepository, JobRepository, EventRepository],
  });
};
// One-time setup: open the shared database connection and seed the admin
// account expected by the user-service tests.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
  const { ctx } = setup();
  await ctx.newUser({ isAdmin: true, email: 'admin@immich.cloud' });
});
// Medium tests for UserService: creation, search, retrieval, profile updates,
// licensing, and the scheduled deleted-user sweep.
describe(UserService.name, () => {
  describe('create', () => {
    it('should create a user', async () => {
      const { sut, ctx } = setup();
      // Event emission is mocked; creation triggers an event internally.
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const user = mediumFactory.userInsert();
      await expect(sut.createUser({ name: user.name, email: user.email })).resolves.toEqual(
        expect.objectContaining({ name: user.name, email: user.email }),
      );
    });
    it('should reject user with duplicate email', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const user = mediumFactory.userInsert();
      await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
      // Second creation with the same email must fail.
      await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
    });
    it('should not return password', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = mediumFactory.userInsert({ password: 'password' });
      const user = await sut.createUser({ email: dto.email, password: 'password' });
      // The password hash must never leak through the service response.
      expect((user as any).password).toBeUndefined();
    });
  });
  describe('search', () => {
    it('should get users', async () => {
      const { sut, ctx } = setup();
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      const auth = factory.auth({ user: user1 });
      await expect(sut.search(auth)).resolves.toEqual(
        expect.arrayContaining([
          expect.objectContaining({ email: user1.email }),
          expect.objectContaining({ email: user2.email }),
        ]),
      );
    });
  });
  describe('get', () => {
    it('should get a user', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      await expect(sut.get(user.id)).resolves.toEqual(
        expect.objectContaining({
          id: user.id,
          name: user.name,
          email: user.email,
        }),
      );
    });
    it('should not return password', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const result = await sut.get(user.id);
      expect((result as any).password).toBeUndefined();
    });
  });
  describe('updateMe', () => {
    it('should update a user', async () => {
      const { sut, ctx } = setup();
      const { user, result: before } = await ctx.newUser();
      const auth = factory.auth({ user: { id: user.id } });
      const after = await sut.updateMe(auth, { name: `${before.name} Updated` });
      // A successful update must bump the updatedAt timestamp.
      expect(before.updatedAt).toBeDefined();
      expect(after.updatedAt).toBeDefined();
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });
  describe('setLicense', () => {
    it('should set a license', async () => {
      // Fixture license/activation keys; format-valid test values.
      const license = {
        licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
        activationKey:
          'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
      };
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user: { id: user.id } });
      // No license yet: fetching must fail before one is set.
      await expect(sut.getLicense(auth)).rejects.toThrowError();
      const after = await sut.setLicense(auth, license);
      expect(after.licenseKey).toEqual(license.licenseKey);
      expect(after.activationKey).toEqual(license.activationKey);
      const getResponse = await sut.getLicense(auth);
      expect(getResponse).toEqual(after);
    });
  });
  describe.sequential('handleUserDeleteCheck', () => {
    beforeEach(async () => {
      const { sut } = setup();
      // These tests specifically have to be sequential otherwise we hit race conditions with config changes applying in incorrect tests
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 7;
      await sut.updateConfig(config);
    });
    it('should work when there are no deleted users', async () => {
      const { sut, ctx } = setup();
      const jobMock = ctx.getMock(JobRepository);
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      // Nothing to delete: the sweep queues an empty batch.
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });
    it('should work when there is a user to delete', async () => {
      // Fresh database so deleted users from other tests cannot interfere.
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      const { user } = await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([{ name: JobName.UserDelete, data: { id: user.id } }]);
    });
    it('should skip a recently deleted user', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // Deleted 5 days ago — inside the 7-day delay window, so not eligible yet.
      await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });
    it('should respect a custom user delete delay', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // 25 days ago would exceed the default delay, but not the custom 30-day one.
      await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 25 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 30;
      await sut.updateConfig(config);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });
  });
});

View file

@ -0,0 +1,70 @@
import { Kysely } from 'kysely';
import { serverVersion } from 'src/constants';
import { JobName } from 'src/enum';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
import { DB } from 'src/schema';
import { VersionService } from 'src/services/version.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;

/**
 * Builds a VersionService with real database/version-history repositories;
 * logging and jobs are mocked.
 * @param db optional database override; defaults to the shared test connection
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(VersionService, {
    // `??` rather than `||`: only substitute the default when no db was passed.
    database: db ?? defaultDatabase,
    real: [DatabaseRepository, VersionHistoryRepository],
    mock: [LoggingRepository, JobRepository],
  });
};
// One-time setup for this file: open the shared test database connection.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for VersionService.onBootstrap: version-history recording and
// the one-off memory-generation job queued on upgrade from v1.128.0.
describe(VersionService.name, () => {
  describe('onBootstrap', () => {
    it('record the current version on startup', async () => {
      const { sut, ctx } = setup();
      const versionHistoryRepo = ctx.get(VersionHistoryRepository);
      const itemsBefore = await versionHistoryRepo.getAll();
      expect(itemsBefore).toHaveLength(0);
      await sut.onBootstrap();
      // Bootstrap writes exactly one history row for the running server version.
      const itemsAfter = await versionHistoryRepo.getAll();
      expect(itemsAfter).toHaveLength(1);
      expect(itemsAfter[0]).toEqual({
        createdAt: expect.any(Date),
        id: expect.any(String),
        version: serverVersion.toString(),
      });
    });
    it('should queue memory creation when upgrading from 1.128.0', async () => {
      const { sut, ctx } = setup();
      const jobMock = ctx.getMock(JobRepository);
      const versionHistoryRepo = ctx.get(VersionHistoryRepository);
      jobMock.queue.mockResolvedValue(void 0);
      // Simulate a prior install at v1.128.0, the last version without memories.
      await versionHistoryRepo.create({ version: 'v1.128.0' });
      await sut.onBootstrap();
      expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.MemoryGenerate });
    });
    it('should not queue memory creation when upgrading from 1.129.0', async () => {
      const { sut, ctx } = setup();
      const jobMock = ctx.getMock(JobRepository);
      const versionHistoryRepo = ctx.get(VersionHistoryRepository);
      // v1.129.0 already has memories, so no backfill job should be queued.
      await versionHistoryRepo.create({ version: 'v1.129.0' });
      await sut.onBootstrap();
      expect(jobMock.queue).not.toHaveBeenCalled();
    });
  });
});

View file

@ -0,0 +1,776 @@
import { Kysely } from 'kysely';
import { PluginContext, PluginTriggerType } from 'src/enum';
import { AccessRepository } from 'src/repositories/access.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { PluginRepository } from 'src/repositories/plugin.repository';
import { WorkflowRepository } from 'src/repositories/workflow.repository';
import { DB } from 'src/schema';
import { WorkflowService } from 'src/services/workflow.service';
import { newMediumService } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;

/**
 * Builds a WorkflowService with real workflow/plugin/access repositories;
 * logging is mocked.
 * @param db optional database override; defaults to the shared test connection
 */
const setup = (db?: Kysely<DB>) => {
  return newMediumService(WorkflowService, {
    // `??` rather than `||`: only substitute the default when no db was passed.
    database: db ?? defaultDatabase,
    real: [WorkflowRepository, PluginRepository, AccessRepository],
    mock: [LoggingRepository],
  });
};
// One-time setup for this file: open the shared test database connection.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(WorkflowService.name, () => {
let testPluginId: string;
let testFilterId: string;
let testActionId: string;
// Fixture: load one test plugin (with one asset-context filter and one
// asset-context action) shared by all workflow tests; the resulting IDs are
// captured in testPluginId/testFilterId/testActionId.
beforeAll(async () => {
  // Create a test plugin with filters and actions once for all tests
  const pluginRepo = new PluginRepository(defaultDatabase);
  const result = await pluginRepo.loadPlugin(
    {
      name: 'test-core-plugin',
      title: 'Test Core Plugin',
      description: 'A test core plugin for workflow tests',
      author: 'Test Author',
      version: '1.0.0',
      wasm: {
        path: '/test/path.wasm',
      },
      filters: [
        {
          methodName: 'test-filter',
          title: 'Test Filter',
          description: 'A test filter',
          supportedContexts: [PluginContext.Asset],
          schema: undefined,
        },
      ],
      actions: [
        {
          methodName: 'test-action',
          title: 'Test Action',
          description: 'A test action',
          supportedContexts: [PluginContext.Asset],
          schema: undefined,
        },
      ],
    },
    '/plugins/test-core-plugin',
  );
  testPluginId = result.plugin.id;
  testFilterId = result.filters[0].id;
  testActionId = result.actions[0].id;
});
// Teardown: remove the shared fixture plugin row; presumably its filters and
// actions are removed via FK cascade — confirm against the schema.
afterAll(async () => {
  await defaultDatabase.deleteFrom('plugin').where('id', '=', testPluginId).execute();
});
// WorkflowService.create: persistence of workflows with ordered filters/actions,
// and validation of filter/action IDs and trigger-context compatibility.
describe('create', () => {
  it('should create a workflow without filters or actions', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const workflow = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'A test workflow',
      enabled: true,
      filters: [],
      actions: [],
    });
    expect(workflow).toMatchObject({
      id: expect.any(String),
      ownerId: user.id,
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'A test workflow',
      enabled: true,
      filters: [],
      actions: [],
    });
  });
  it('should create a workflow with filters and actions', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const workflow = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow-with-relations',
      description: 'A test workflow with filters and actions',
      enabled: true,
      filters: [
        {
          pluginFilterId: testFilterId,
          filterConfig: { key: 'value' },
        },
      ],
      actions: [
        {
          pluginActionId: testActionId,
          actionConfig: { action: 'test' },
        },
      ],
    });
    expect(workflow).toMatchObject({
      id: expect.any(String),
      ownerId: user.id,
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow-with-relations',
      enabled: true,
    });
    // Filters and actions come back with their position in `order`, 0-based.
    expect(workflow.filters).toHaveLength(1);
    expect(workflow.filters[0]).toMatchObject({
      id: expect.any(String),
      workflowId: workflow.id,
      pluginFilterId: testFilterId,
      filterConfig: { key: 'value' },
      order: 0,
    });
    expect(workflow.actions).toHaveLength(1);
    expect(workflow.actions[0]).toMatchObject({
      id: expect.any(String),
      workflowId: workflow.id,
      pluginActionId: testActionId,
      actionConfig: { action: 'test' },
      order: 0,
    });
  });
  it('should throw error when creating workflow with invalid filter', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    await expect(
      sut.create(auth, {
        triggerType: PluginTriggerType.AssetCreate,
        name: 'invalid-workflow',
        description: 'A workflow with invalid filter',
        enabled: true,
        // Random UUID that matches no registered plugin filter.
        filters: [{ pluginFilterId: factory.uuid(), filterConfig: { key: 'value' } }],
        actions: [],
      }),
    ).rejects.toThrow('Invalid filter ID');
  });
  it('should throw error when creating workflow with invalid action', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    await expect(
      sut.create(auth, {
        triggerType: PluginTriggerType.AssetCreate,
        name: 'invalid-workflow',
        description: 'A workflow with invalid action',
        enabled: true,
        filters: [],
        actions: [{ pluginActionId: factory.uuid(), actionConfig: { action: 'test' } }],
      }),
    ).rejects.toThrow('Invalid action ID');
  });
  it('should throw error when filter does not support trigger context', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    // Create a plugin with a filter that only supports Album context
    // NOTE(review): this plugin is loaded under '/plugins/test-core-plugin'
    // (same path as the shared fixture) and is never cleaned up in afterAll —
    // confirm whether that is intentional.
    const pluginRepo = new PluginRepository(defaultDatabase);
    const result = await pluginRepo.loadPlugin(
      {
        name: 'album-only-plugin',
        title: 'Album Only Plugin',
        description: 'Plugin with album-only filter',
        author: 'Test Author',
        version: '1.0.0',
        wasm: { path: '/test/album-plugin.wasm' },
        filters: [
          {
            methodName: 'album-filter',
            title: 'Album Filter',
            description: 'A filter that only works with albums',
            supportedContexts: [PluginContext.Album],
            schema: undefined,
          },
        ],
      },
      '/plugins/test-core-plugin',
    );
    await expect(
      sut.create(auth, {
        triggerType: PluginTriggerType.AssetCreate,
        name: 'invalid-context-workflow',
        description: 'A workflow with context mismatch',
        enabled: true,
        filters: [{ pluginFilterId: result.filters[0].id }],
        actions: [],
      }),
    ).rejects.toThrow('does not support asset context');
  });
  it('should throw error when action does not support trigger context', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    // Create a plugin with an action that only supports Person context
    const pluginRepo = new PluginRepository(defaultDatabase);
    const result = await pluginRepo.loadPlugin(
      {
        name: 'person-only-plugin',
        title: 'Person Only Plugin',
        description: 'Plugin with person-only action',
        author: 'Test Author',
        version: '1.0.0',
        wasm: { path: '/test/person-plugin.wasm' },
        actions: [
          {
            methodName: 'person-action',
            title: 'Person Action',
            description: 'An action that only works with persons',
            supportedContexts: [PluginContext.Person],
            schema: undefined,
          },
        ],
      },
      '/plugins/test-core-plugin',
    );
    await expect(
      sut.create(auth, {
        triggerType: PluginTriggerType.AssetCreate,
        name: 'invalid-context-workflow',
        description: 'A workflow with context mismatch',
        enabled: true,
        filters: [],
        actions: [{ pluginActionId: result.actions[0].id }],
      }),
    ).rejects.toThrow('does not support asset context');
  });
  it('should create workflow with multiple filters and actions in correct order', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const workflow = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'multi-step-workflow',
      description: 'A workflow with multiple filters and actions',
      enabled: true,
      filters: [
        { pluginFilterId: testFilterId, filterConfig: { step: 1 } },
        { pluginFilterId: testFilterId, filterConfig: { step: 2 } },
      ],
      actions: [
        { pluginActionId: testActionId, actionConfig: { step: 1 } },
        { pluginActionId: testActionId, actionConfig: { step: 2 } },
        { pluginActionId: testActionId, actionConfig: { step: 3 } },
      ],
    });
    // `order` must reflect the submission order of each filter/action.
    expect(workflow.filters).toHaveLength(2);
    expect(workflow.filters[0].order).toBe(0);
    expect(workflow.filters[0].filterConfig).toEqual({ step: 1 });
    expect(workflow.filters[1].order).toBe(1);
    expect(workflow.filters[1].filterConfig).toEqual({ step: 2 });
    expect(workflow.actions).toHaveLength(3);
    expect(workflow.actions[0].order).toBe(0);
    expect(workflow.actions[1].order).toBe(1);
    expect(workflow.actions[2].order).toBe(2);
  });
});
// WorkflowService.getAll: listing is scoped to the authenticated owner.
describe('getAll', () => {
  it('should return all workflows for a user', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const workflow1 = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'workflow-1',
      description: 'First workflow',
      enabled: true,
      filters: [],
      actions: [],
    });
    const workflow2 = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'workflow-2',
      description: 'Second workflow',
      enabled: false,
      filters: [],
      actions: [],
    });
    const workflows = await sut.getAll(auth);
    expect(workflows).toHaveLength(2);
    expect(workflows).toEqual(
      expect.arrayContaining([
        expect.objectContaining({ id: workflow1.id, name: 'workflow-1' }),
        expect.objectContaining({ id: workflow2.id, name: 'workflow-2' }),
      ]),
    );
  });
  it('should return empty array when user has no workflows', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const workflows = await sut.getAll(auth);
    expect(workflows).toEqual([]);
  });
  it('should not return workflows from other users', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });
    await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'user1-workflow',
      description: 'User 1 workflow',
      enabled: true,
      filters: [],
      actions: [],
    });
    // user2 must not see user1's workflow.
    const user2Workflows = await sut.getAll(auth2);
    expect(user2Workflows).toEqual([]);
  });
});
// WorkflowService.get: fetch by id with ownership/access enforcement.
describe('get', () => {
  it('should return a specific workflow by id', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'A test workflow',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
      actions: [{ pluginActionId: testActionId, actionConfig: { action: 'test' } }],
    });
    const workflow = await sut.get(auth, created.id);
    expect(workflow).toMatchObject({
      id: created.id,
      name: 'test-workflow',
      description: 'A test workflow',
      enabled: true,
    });
    // Relations are hydrated on fetch.
    expect(workflow.filters).toHaveLength(1);
    expect(workflow.actions).toHaveLength(1);
  });
  it('should throw error when workflow does not exist', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    // Arbitrary well-formed UUID with no matching row.
    await expect(sut.get(auth, '66da82df-e424-4bf4-b6f3-5d8e71620dae')).rejects.toThrow();
  });
  it('should throw error when user does not have access to workflow', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });
    const workflow = await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'private-workflow',
      description: 'Private workflow',
      enabled: true,
      filters: [],
      actions: [],
    });
    await expect(sut.get(auth2, workflow.id)).rejects.toThrow();
  });
});
// WorkflowService.update: partial updates of scalar fields, full replacement
// semantics for filters/actions, plus validation and access checks.
describe('update', () => {
  it('should update workflow basic fields', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'original-workflow',
      description: 'Original description',
      enabled: true,
      filters: [],
      actions: [],
    });
    const updated = await sut.update(auth, created.id, {
      name: 'updated-workflow',
      description: 'Updated description',
      enabled: false,
    });
    expect(updated).toMatchObject({
      id: created.id,
      name: 'updated-workflow',
      description: 'Updated description',
      enabled: false,
    });
  });
  it('should update workflow filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { old: 'config' } }],
      actions: [],
    });
    // Supplying `filters` replaces the whole list, not merges it.
    const updated = await sut.update(auth, created.id, {
      filters: [
        { pluginFilterId: testFilterId, filterConfig: { new: 'config' } },
        { pluginFilterId: testFilterId, filterConfig: { second: 'filter' } },
      ],
    });
    expect(updated.filters).toHaveLength(2);
    expect(updated.filters[0].filterConfig).toEqual({ new: 'config' });
    expect(updated.filters[1].filterConfig).toEqual({ second: 'filter' });
  });
  it('should update workflow actions', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [{ pluginActionId: testActionId, actionConfig: { old: 'config' } }],
    });
    // Same replacement semantics for actions.
    const updated = await sut.update(auth, created.id, {
      actions: [
        { pluginActionId: testActionId, actionConfig: { new: 'config' } },
        { pluginActionId: testActionId, actionConfig: { second: 'action' } },
      ],
    });
    expect(updated.actions).toHaveLength(2);
    expect(updated.actions[0].actionConfig).toEqual({ new: 'config' });
    expect(updated.actions[1].actionConfig).toEqual({ second: 'action' });
  });
  it('should clear filters when updated with empty array', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
      actions: [],
    });
    const updated = await sut.update(auth, created.id, {
      filters: [],
    });
    expect(updated.filters).toHaveLength(0);
  });
  it('should throw error when no fields to update', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });
    // An empty patch is rejected rather than silently succeeding.
    await expect(sut.update(auth, created.id, {})).rejects.toThrow('No fields to update');
  });
  it('should throw error when updating non-existent workflow', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    await expect(sut.update(auth, factory.uuid(), { name: 'updated-name' })).rejects.toThrow();
  });
  it('should throw error when user does not have access to update workflow', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });
    const workflow = await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'private-workflow',
      description: 'Private',
      enabled: true,
      filters: [],
      actions: [],
    });
    await expect(
      sut.update(auth2, workflow.id, {
        name: 'hacked-workflow',
      }),
    ).rejects.toThrow();
  });
  it('should throw error when updating with invalid filter', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });
    await expect(
      sut.update(auth, created.id, {
        filters: [{ pluginFilterId: factory.uuid(), filterConfig: {} }],
      }),
    ).rejects.toThrow();
  });
  it('should throw error when updating with invalid action', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });
    await expect(
      sut.update(auth, created.id, { actions: [{ pluginActionId: factory.uuid(), actionConfig: {} }] }),
    ).rejects.toThrow();
  });
  it('should update trigger type', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.PersonRecognized,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });
    await sut.update(auth, created.id, {
      triggerType: PluginTriggerType.AssetCreate,
    });
    // Re-fetch to verify the change was persisted, not just echoed back.
    const fetched = await sut.get(auth, created.id);
    expect(fetched.triggerType).toBe(PluginTriggerType.AssetCreate);
  });
  it('should add filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });
    await sut.update(auth, created.id, {
      filters: [
        { pluginFilterId: testFilterId, filterConfig: { first: true } },
        { pluginFilterId: testFilterId, filterConfig: { second: true } },
      ],
    });
    const fetched = await sut.get(auth, created.id);
    expect(fetched.filters).toHaveLength(2);
    expect(fetched.filters[0].filterConfig).toEqual({ first: true });
    expect(fetched.filters[1].filterConfig).toEqual({ second: true });
  });
  it('should replace existing filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { original: true } }],
      actions: [],
    });
    await sut.update(auth, created.id, {
      filters: [{ pluginFilterId: testFilterId, filterConfig: { replaced: true } }],
    });
    const fetched = await sut.get(auth, created.id);
    expect(fetched.filters).toHaveLength(1);
    expect(fetched.filters[0].filterConfig).toEqual({ replaced: true });
  });
  it('should remove existing filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { toRemove: true } }],
      actions: [],
    });
    await sut.update(auth, created.id, {
      filters: [],
    });
    const fetched = await sut.get(auth, created.id);
    expect(fetched.filters).toHaveLength(0);
  });
});
describe('delete', () => {
it('should delete a workflow', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const workflow = await sut.create(auth, {
triggerType: PluginTriggerType.AssetCreate,
name: 'test-workflow',
description: 'Test',
enabled: true,
filters: [],
actions: [],
});
await sut.delete(auth, workflow.id);
await expect(sut.get(auth, workflow.id)).rejects.toThrow('Not found or no workflow.read access');
});
it('should delete workflow with filters and actions', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
const workflow = await sut.create(auth, {
triggerType: PluginTriggerType.AssetCreate,
name: 'test-workflow',
description: 'Test',
enabled: true,
filters: [{ pluginFilterId: testFilterId, filterConfig: {} }],
actions: [{ pluginActionId: testActionId, actionConfig: {} }],
});
await sut.delete(auth, workflow.id);
await expect(sut.get(auth, workflow.id)).rejects.toThrow('Not found or no workflow.read access');
});
it('should throw error when deleting non-existent workflow', async () => {
const { sut, ctx } = setup();
const { user } = await ctx.newUser();
const auth = factory.auth({ user });
await expect(sut.delete(auth, factory.uuid())).rejects.toThrow();
});
it('should throw error when user does not have access to delete workflow', async () => {
const { sut, ctx } = setup();
const { user: user1 } = await ctx.newUser();
const { user: user2 } = await ctx.newUser();
const auth1 = factory.auth({ user: user1 });
const auth2 = factory.auth({ user: user2 });
const workflow = await sut.create(auth1, {
triggerType: PluginTriggerType.AssetCreate,
name: 'private-workflow',
description: 'Private',
enabled: true,
filters: [],
actions: [],
});
await expect(sut.delete(auth2, workflow.id)).rejects.toThrow();
});
});
});

View file

@ -0,0 +1,373 @@
import { Kysely } from 'kysely';
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DB } from 'src/schema';
import { updateLockedColumns } from 'src/utils/database';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB, wait } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = async (db?: Kysely<DB>) => {
const ctx = new SyncTestContext(db || defaultDatabase);
const { auth, user, session } = await ctx.newSyncAuthUser();
return { auth, user, session, ctx };
};
const updateSyncAck = {
ack: expect.stringContaining(SyncEntityType.AlbumAssetExifUpdateV1),
data: {},
type: SyncEntityType.SyncAckV1,
};
const backfillSyncAck = {
ack: expect.stringContaining(SyncEntityType.AlbumAssetExifBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
// AlbumAssetExifsV1 sync stream: exif rows for assets in albums visible to the
// sync user are delivered as create/update/backfill events with checkpoint acks.
describe(SyncRequestType.AlbumAssetExifsV1, () => {
  it('should detect and sync the first album asset exif', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    // only 'make' was set above; every other exif column syncs as its default
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: {
          assetId: asset.id,
          city: null,
          country: null,
          dateTimeOriginal: null,
          description: '',
          exifImageHeight: null,
          exifImageWidth: null,
          exposureTime: null,
          fNumber: null,
          fileSizeInByte: null,
          focalLength: null,
          fps: null,
          iso: null,
          latitude: null,
          lensModel: null,
          longitude: null,
          make: 'Canon',
          model: null,
          modifyDate: null,
          orientation: null,
          profileDescription: null,
          projectionType: null,
          rating: null,
          state: null,
          timeZone: null,
        },
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
  });
  // A user's own assets appear both in the personal exif stream and, once the
  // asset is in one of their albums, in the album exif stream.
  it('should sync album asset exif for own user', async () => {
    const { auth, ctx } = await setup();
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
      expect.objectContaining({ type: SyncEntityType.AlbumAssetExifCreateV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
  // The sync user (auth) is not a member of user2's album, so their album exif
  // stream stays empty; user3 — who IS a member — still receives their own
  // asset through the personal AssetExifsV1 stream.
  it('should not sync album asset exif for unrelated user', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.Editor });
    const { session } = await ctx.newSession({ userId: user3.id });
    const authUser3 = factory.auth({ session, user: user3 });
    await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
  });
  it('should backfill album assets exif when a user shares an album with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
    const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
    const { asset: asset1User2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: asset1User2.id, make: 'asset1User2' });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset1User2.id });
    // wait(2): spaces out row timestamps so checkpoint ordering is
    // deterministic — presumably millisecond resolution; TODO confirm
    await wait(2);
    const { asset: asset2User2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: asset2User2.id, make: 'asset2User2' });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset2User2.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album1.id, assetId: asset2User2.id });
    await wait(2);
    const { asset: asset3User2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset3User2.id });
    await ctx.newExif({ assetId: asset3User2.id, make: 'asset3User2' });
    await wait(2);
    await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset2User2.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // ack initial album asset exif sync
    await ctx.syncAckAll(auth, response);
    // share the second album with the sync user
    await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    // exifs that predate the checkpoint should now arrive as backfill events,
    // while the newest one still comes through as a regular create
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset1User2.id,
        }),
        type: SyncEntityType.AlbumAssetExifBackfillV1,
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset2User2.id,
        }),
        type: SyncEntityType.AlbumAssetExifBackfillV1,
      },
      backfillSyncAck,
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset3User2.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
  });
  it('should sync old asset exif when a user adds them to an album they share you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset: firstAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'firstAsset' });
    await ctx.newExif({ assetId: firstAsset.id, make: 'firstAsset' });
    const { asset: secondAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'secondAsset' });
    await ctx.newExif({ assetId: secondAsset.id, make: 'secondAsset' });
    const { asset: album1Asset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'album1Asset' });
    await ctx.newExif({ assetId: album1Asset.id, make: 'album1Asset' });
    const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
    const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: firstAsset.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
    await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const firstAlbumResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    expect(firstAlbumResponse).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: album1Asset.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, firstAlbumResponse);
    await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: firstAsset.id,
        }),
        type: SyncEntityType.AlbumAssetExifBackfillV1,
      },
      backfillSyncAck,
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // ack initial album asset sync
    await ctx.syncAckAll(auth, response);
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: secondAsset.id });
    await wait(2);
    // the asset is older than firstAsset, but its album membership is new, so
    // its exif comes through the normal create stream (not a backfill)
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    expect(newResponse).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: secondAsset.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
  });
  it('should sync asset exif updates for an album shared with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: asset.id, make: 'asset' });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // update the asset's exif after the checkpoint; it should surface as an
    // update event on the next stream
    const assetRepository = ctx.get(AssetRepository);
    await assetRepository.upsertExif(
      updateLockedColumns({
        assetId: asset.id,
        city: 'New City',
      }),
      { lockedPropertiesBehavior: 'append' },
    );
    await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset.id,
          city: 'New City',
        }),
        type: SyncEntityType.AlbumAssetExifUpdateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
  // An asset can join an album before its exif row exists; when the exif shows
  // up later it must still be delivered (as an update, since the create
  // checkpoint has already advanced past the album membership).
  it('should sync delayed asset exif creates for an album shared with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset: assetWithExif } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: assetWithExif.id, make: 'assetWithExif' });
    const { asset: assetDelayedExif } = await ctx.newAsset({ ownerId: user2.id });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    const { asset: newerAsset } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: newerAsset.id, make: 'newerAsset' });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: assetWithExif.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album.id, assetId: assetDelayedExif.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album.id, assetId: newerAsset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
    // only the two assets that already have exif rows sync initially
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: assetWithExif.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: newerAsset.id,
        }),
        type: SyncEntityType.AlbumAssetExifCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // the delayed exif row finally arrives; it surfaces as an update event
    const assetRepository = ctx.get(AssetRepository);
    await assetRepository.upsertExif(
      updateLockedColumns({
        assetId: assetDelayedExif.id,
        city: 'Delayed Exif',
      }),
      { lockedPropertiesBehavior: 'append' },
    );
    await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: assetDelayedExif.id,
          city: 'Delayed Exif',
        }),
        type: SyncEntityType.AlbumAssetExifUpdateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
});

View file

@ -0,0 +1,312 @@
import { Kysely } from 'kysely';
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB, wait } from 'test/utils';
let defaultDatabase: Kysely<DB>;
const setup = async (db?: Kysely<DB>) => {
const ctx = new SyncTestContext(db || defaultDatabase);
const { auth, user, session } = await ctx.newSyncAuthUser();
return { auth, user, session, ctx };
};
const updateSyncAck = {
ack: expect.stringContaining(SyncEntityType.AlbumAssetUpdateV1),
data: {},
type: SyncEntityType.SyncAckV1,
};
const backfillSyncAck = {
ack: expect.stringContaining(SyncEntityType.AlbumAssetBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
};
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
// AlbumAssetsV1 sync stream: assets in albums visible to the sync user are
// delivered as create/update/backfill events with checkpoint acks.
describe(SyncRequestType.AlbumAssetsV1, () => {
  it('should detect and sync the first album asset', async () => {
    const originalFileName = 'firstAsset';
    const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
    const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
    const date = new Date().toISOString();
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    // checksum/thumbhash are stored as binary; the sync payload returns them
    // re-encoded as the same base64 strings
    const { asset } = await ctx.newAsset({
      originalFileName,
      ownerId: user2.id,
      checksum: Buffer.from(checksum, 'base64'),
      thumbhash: Buffer.from(thumbhash, 'base64'),
      fileCreatedAt: date,
      fileModifiedAt: date,
      localDateTime: date,
      deletedAt: null,
      duration: '0:10:00.00000',
      livePhotoVideoId: null,
      stackId: null,
      libraryId: null,
      width: 1920,
      height: 1080,
    });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: {
          id: asset.id,
          originalFileName,
          ownerId: asset.ownerId,
          thumbhash,
          checksum,
          deletedAt: asset.deletedAt,
          fileCreatedAt: asset.fileCreatedAt,
          fileModifiedAt: asset.fileModifiedAt,
          isFavorite: asset.isFavorite,
          localDateTime: asset.localDateTime,
          type: asset.type,
          visibility: asset.visibility,
          duration: asset.duration,
          livePhotoVideoId: asset.livePhotoVideoId,
          stackId: asset.stackId,
          libraryId: asset.libraryId,
          width: asset.width,
          height: asset.height,
          isEdited: asset.isEdited,
        },
        type: SyncEntityType.AlbumAssetCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
  });
  // A user's own assets appear both in the personal asset stream and, once
  // placed in one of their albums, in the album asset stream.
  it('should sync album asset for own user', async () => {
    const { auth, ctx } = await setup();
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
      expect.objectContaining({ type: SyncEntityType.AlbumAssetCreateV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
  // The sync user (auth) is not a member of user2's album, so their album
  // asset stream stays empty; user3 — who IS a member — still receives their
  // own asset through the personal AssetsV1 stream.
  it('should not sync album asset for unrelated user', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user3.id });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.Editor });
    const { session } = await ctx.newSession({ userId: user3.id });
    const authUser3 = factory.auth({ session, user: user3 });
    await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
  });
  it('should backfill album assets when a user shares an album with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
    const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
    const { asset: asset1User2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset1User2.id });
    // wait(2): spaces out row timestamps so checkpoint ordering is
    // deterministic — presumably millisecond resolution; TODO confirm
    await wait(2);
    const { asset: asset2User2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset2User2.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album1.id, assetId: asset2User2.id });
    await wait(2);
    const { asset: asset3User2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset3User2.id });
    await wait(2);
    await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset2User2.id,
        }),
        type: SyncEntityType.AlbumAssetCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // ack initial album asset sync
    await ctx.syncAckAll(auth, response);
    // share the second album with the sync user
    await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    // assets that predate the checkpoint should now arrive as backfill events,
    // while the newest one still comes through as a regular create
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset1User2.id,
        }),
        type: SyncEntityType.AlbumAssetBackfillV1,
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset2User2.id,
        }),
        type: SyncEntityType.AlbumAssetBackfillV1,
      },
      backfillSyncAck,
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset3User2.id,
        }),
        type: SyncEntityType.AlbumAssetCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
  });
  it('should sync old assets when a user adds them to an album they share you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset: firstAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'firstAsset' });
    const { asset: secondAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'secondAsset' });
    const { asset: album1Asset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'album1Asset' });
    const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
    const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: firstAsset.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
    await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const firstAlbumResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(firstAlbumResponse).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: album1Asset.id,
        }),
        type: SyncEntityType.AlbumAssetCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, firstAlbumResponse);
    await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: firstAsset.id,
        }),
        type: SyncEntityType.AlbumAssetBackfillV1,
      },
      backfillSyncAck,
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // ack initial album asset sync
    await ctx.syncAckAll(auth, response);
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: secondAsset.id });
    await wait(2);
    // the asset is older than firstAsset, but its album membership is new, so
    // it comes through the normal create stream (not a backfill)
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(newResponse).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: secondAsset.id,
        }),
        type: SyncEntityType.AlbumAssetCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
  });
  it('should sync asset updates for an album shared with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id, isFavorite: false });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await wait(2);
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(response).toEqual([
      updateSyncAck,
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset.id,
        }),
        type: SyncEntityType.AlbumAssetCreateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // update the asset after the checkpoint; it should surface as an update
    // event on the next stream
    const assetRepository = ctx.get(AssetRepository);
    await assetRepository.update({
      id: asset.id,
      isFavorite: true,
    });
    const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
    expect(updateResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset.id,
          isFavorite: true,
        }),
        type: SyncEntityType.AlbumAssetUpdateV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
});

View file

@ -0,0 +1,265 @@
import { Kysely } from 'kysely';
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
import { AlbumRepository } from 'src/repositories/album.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB, wait } from 'test/utils';
let defaultDatabase: Kysely<DB>;

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});

// Fresh sync test context (optionally on a caller-provided database handle)
// plus an authenticated sync user for it.
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
describe(SyncRequestType.AlbumToAssetsV1, () => {
  it('should detect and sync the first album to asset relation', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    // role omitted — relies on newAlbumUser's default role; TODO confirm
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          albumId: album.id,
          assetId: asset.id,
        },
        type: SyncEntityType.AlbumToAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
  });
  it('should sync album to asset for owned albums', async () => {
    const { auth, ctx } = await setup();
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          albumId: album.id,
          assetId: asset.id,
        },
        type: SyncEntityType.AlbumToAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
  });
  it('should detect and sync the album to asset for shared albums', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          albumId: album.id,
          assetId: asset.id,
        },
        type: SyncEntityType.AlbumToAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
  });
  // No album membership for the sync user → the stream has nothing to send.
  it('should not sync album to asset for an album owned by another user', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    const { album } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
  });
  it('should backfill album to assets when a user shares an album with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { asset: album1Asset } = await ctx.newAsset({ ownerId: user2.id });
    const { asset: album2Asset } = await ctx.newAsset({ ownerId: auth.user.id });
    // Backfill album
    const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
    await ctx.newAlbumAsset({ albumId: album2.id, assetId: album2Asset.id });
    // wait(2): spaces out row timestamps so checkpoint ordering is
    // deterministic — presumably millisecond resolution; TODO confirm
    await wait(2);
    const { album: album1 } = await ctx.newAlbum({ ownerId: auth.user.id });
    await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          albumId: album1.id,
          assetId: album1Asset.id,
        },
        type: SyncEntityType.AlbumToAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // ack initial album to asset sync
    await ctx.syncAckAll(auth, response);
    // add user to backfill album
    await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
    // should backfill the album to asset relation
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          albumId: album2.id,
          assetId: album2Asset.id,
        }),
        type: SyncEntityType.AlbumToAssetBackfillV1,
      },
      {
        ack: expect.stringContaining(SyncEntityType.AlbumToAssetBackfillV1),
        data: {},
        type: SyncEntityType.SyncAckV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
  });
  // Removing an asset from an album emits a delete event for the relation.
  it('should detect and sync a deleted album to asset relation', async () => {
    const { auth, ctx } = await setup();
    const albumRepo = ctx.get(AlbumRepository);
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
    await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          albumId: album.id,
          assetId: asset.id,
        },
        type: SyncEntityType.AlbumToAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await albumRepo.removeAssetIds(album.id, [asset.id]);
    await wait(2);
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: {
          albumId: album.id,
          assetId: asset.id,
        },
        type: SyncEntityType.AlbumToAssetDeleteV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
  });
// Deleting the asset itself should also surface as an AlbumToAssetDeleteV1
// for the album that contained it.
it('should detect and sync a deleted album to asset relation when an asset is deleted', async () => {
const { auth, ctx } = await setup();
const assetRepo = ctx.get(AssetRepository);
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
albumId: album.id,
assetId: asset.id,
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await assetRepo.remove({ id: asset.id });
// let the delete land after the acked checkpoint
await wait(2);
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: {
albumId: album.id,
assetId: asset.id,
},
type: SyncEntityType.AlbumToAssetDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
// When the whole album is deleted, no per-asset delete markers are expected —
// presumably the album delete itself (AlbumDeleteV1 stream) covers cleanup.
it('should not sync a deleted album to asset relation when the album is deleted', async () => {
const { auth, ctx } = await setup();
const albumRepo = ctx.get(AlbumRepository);
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
albumId: album.id,
assetId: asset.id,
},
type: SyncEntityType.AlbumToAssetV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await albumRepo.delete(album.id);
await wait(2);
// the stream should be empty: no AlbumToAssetDeleteV1 for a deleted album
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
});

// ───────────────────────────── next test file ─────────────────────────────
import { Kysely } from 'kysely';
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB, wait } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Build a fresh sync test context and an authenticated sync user.
// An explicit database handle may be passed; otherwise the suite-wide default is used.
const setup = async (db?: Kysely<DB>) => {
  // '??' rather than '||': only fall back when db is null/undefined
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
// Resolve the shared test database once for all suites in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncRequestType.AlbumUsersV1, () => {
// Checks the AlbumUserV1 payload carries albumId, role, and userId.
it('should sync an album user with the correct properties', async () => {
const { auth, ctx } = await setup();
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const { user } = await ctx.newUser();
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: albumUser.albumId,
role: albumUser.role,
userId: albumUser.userId,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
});
describe('owner', () => {
// Owner perspective: sharing the owner's album with another user emits AlbumUserV1.
it('should detect and sync a new shared user', async () => {
const { auth, ctx } = await setup();
const { user: user1 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: albumUser.albumId,
role: albumUser.role,
userId: albumUser.userId,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
// Owner perspective: a role change on a shared user re-emits AlbumUserV1 with the new role.
it('should detect and sync an updated shared user', async () => {
const { auth, ctx } = await setup();
const albumUserRepo = ctx.get(AlbumUserRepository);
const { user: user1 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.update({ albumId: album.id, userId: user1.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: albumUser.albumId,
role: AlbumUserRole.Viewer,
userId: albumUser.userId,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
// Owner perspective: unsharing emits AlbumUserDeleteV1 for the removed user.
it('should detect and sync a deleted shared user', async () => {
const { auth, ctx } = await setup();
const albumUserRepo = ctx.get(AlbumUserRepository);
const { user: user1 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.delete({ albumId: album.id, userId: user1.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: albumUser.albumId,
userId: albumUser.userId,
}),
type: SyncEntityType.AlbumUserDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
});
describe('shared user', () => {
// Shared-user perspective: being added to someone else's album emits AlbumUserV1.
it('should detect and sync a new shared user', async () => {
const { auth, ctx } = await setup();
const { user: user1 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user1.id });
const { albumUser } = await ctx.newAlbumUser({
albumId: album.id,
userId: auth.user.id,
role: AlbumUserRole.Editor,
});
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: albumUser.albumId,
role: albumUser.role,
userId: albumUser.userId,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
// Shared-user perspective: a role change for *another* member of a shared album
// is visible to this user as an AlbumUserV1 update.
it('should detect and sync an updated shared user', async () => {
const { auth, ctx } = await setup();
const albumUserRepo = ctx.get(AlbumUserRepository);
const { user: owner } = await ctx.newUser();
const { user: user } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: owner.id });
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
// two members → two AlbumUserV1 rows in the initial sync
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.update({ albumId: album.id, userId: user.id }, { role: AlbumUserRole.Viewer });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: album.id,
role: AlbumUserRole.Viewer,
userId: user.id,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
// Shared-user perspective: another member being removed emits AlbumUserDeleteV1.
it('should detect and sync a deleted shared user', async () => {
const { auth, ctx } = await setup();
const albumUserRepo = ctx.get(AlbumUserRepository);
const { user: owner } = await ctx.newUser();
const { user: user } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: owner.id });
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
await albumUserRepo.delete({ albumId: album.id, userId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: album.id,
userId: user.id,
}),
type: SyncEntityType.AlbumUserDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
// Verifies that album-user rows created before this user joined an album are
// backfilled (AlbumUserBackfillV1 + SyncAckV1 marker) once access is granted,
// followed by the post-checkpoint rows as regular AlbumUserV1 entries.
it('should backfill album users when a user shares an album with you', async () => {
const { auth, ctx } = await setup();
const { user: user1 } = await ctx.newUser();
const { user: user2 } = await ctx.newUser();
const { album: album1 } = await ctx.newAlbum({ ownerId: user1.id });
const { album: album2 } = await ctx.newAlbum({ ownerId: user1.id });
// backfill album user
await ctx.newAlbumUser({ albumId: album1.id, userId: user1.id, role: AlbumUserRole.Editor });
await wait(2);
// initial album user
await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
await wait(2);
// post checkpoint album user
await ctx.newAlbumUser({ albumId: album1.id, userId: user2.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
// before joining album1 only the album2 membership is visible
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: album2.id,
role: AlbumUserRole.Editor,
userId: auth.user.id,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// ack initial user
await ctx.syncAckAll(auth, response);
// get access to the backfill album user
await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
// should backfill the album user
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: album1.id,
role: AlbumUserRole.Editor,
userId: user1.id,
}),
type: SyncEntityType.AlbumUserBackfillV1,
},
{
ack: expect.stringContaining(SyncEntityType.AlbumUserBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
},
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: album1.id,
role: AlbumUserRole.Editor,
userId: user2.id,
}),
type: SyncEntityType.AlbumUserV1,
},
{
ack: expect.any(String),
data: expect.objectContaining({
albumId: album1.id,
role: AlbumUserRole.Editor,
userId: auth.user.id,
}),
type: SyncEntityType.AlbumUserV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
});
});

// ───────────────────────────── next test file ─────────────────────────────
import { Kysely } from 'kysely';
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
import { AlbumRepository } from 'src/repositories/album.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Build a fresh sync test context and an authenticated sync user.
// An explicit database handle may be passed; otherwise the suite-wide default is used.
const setup = async (db?: Kysely<DB>) => {
  // '??' rather than '||': only fall back when db is null/undefined
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
// Resolve the shared test database once for all suites in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncRequestType.AlbumsV1, () => {
// Checks the AlbumV1 payload contains the album's core properties.
it('should sync an album with the correct properties', async () => {
const { auth, ctx } = await setup();
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
id: album.id,
name: album.albumName,
ownerId: album.ownerId,
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
// A newly created album appears as AlbumV1 on the next sync.
it('should detect and sync a new album', async () => {
const { auth, ctx } = await setup();
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
id: album.id,
}),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
// Deleting an owned album should emit AlbumDeleteV1 on the next incremental sync.
it('should detect and sync an album delete', async () => {
  const { auth, ctx } = await setup();
  const albumRepo = ctx.get(AlbumRepository);
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: album.id,
      }),
      type: SyncEntityType.AlbumV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  // ack the initial sync so the delete below is observed as an *incremental*
  // change — consistent with every other delete test in this suite
  await ctx.syncAckAll(auth, response);
  await albumRepo.delete(album.id);
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
      },
      type: SyncEntityType.AlbumDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
describe('shared albums', () => {
// Shared-user perspective: an album shared with this user arrives as AlbumV1.
it('should detect and sync an album create', async () => {
const { auth, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user2.id });
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
// Share happens before the first sync; the shared album arrives as AlbumV1.
// NOTE(review): this scenario is identical to the 'album create' test above —
// consider whether the two should differ or one should be removed.
it('should detect and sync an album share (share before sync)', async () => {
const { auth, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user2.id });
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
// Share happens *after* the first sync; the newly shared album must still be
// delivered as AlbumV1 on the following incremental sync.
it('should detect and sync an album share (share after sync)', async () => {
const { auth, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
const { album: userAlbum } = await ctx.newAlbum({ ownerId: auth.user.id });
const { album: user2Album } = await ctx.newAlbum({ ownerId: user2.id });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: userAlbum.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newAlbumUser({ userId: auth.user.id, albumId: user2Album.id, role: AlbumUserRole.Editor });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({ id: user2Album.id }),
type: SyncEntityType.AlbumV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
// Shared-user perspective: the owner deleting a shared album emits AlbumDeleteV1.
// Fix: removed the stray backtick that was embedded in the test name.
it('should detect and sync an album delete', async () => {
  const { auth, ctx } = await setup();
  const albumRepo = ctx.get(AlbumRepository);
  const { user: user2 } = await ctx.newUser();
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
  expect(response).toEqual([
    expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
  await albumRepo.delete(album.id);
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: { albumId: album.id },
      type: SyncEntityType.AlbumDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
// Losing access (unshare) is delivered to the removed user as AlbumDeleteV1,
// since the album is no longer visible to them.
it('should detect and sync an album unshare as an album delete', async () => {
const { auth, ctx } = await setup();
const albumUserRepo = ctx.get(AlbumUserRepository);
const { user: user2 } = await ctx.newUser();
const { album } = await ctx.newAlbum({ ownerId: user2.id });
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(response).toEqual([
expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
await albumUserRepo.delete({ albumId: album.id, userId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: { albumId: album.id },
type: SyncEntityType.AlbumDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
});
});
});

// ───────────────────────────── next test file ─────────────────────────────
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Build a fresh sync test context and an authenticated sync user.
// An explicit database handle may be passed; otherwise the suite-wide default is used.
const setup = async (db?: Kysely<DB>) => {
  // '??' rather than '||': only fall back when db is null/undefined
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
// Resolve the shared test database once for all suites in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncRequestType.AssetExifsV1, () => {
// Pins the full AssetExifV1 payload shape: every unset field is expected as
// null ('' for description) so schema drift in the sync DTO is caught.
it('should detect and sync the first asset exif', async () => {
const { auth, ctx } = await setup();
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const response = await ctx.syncStream(auth, [SyncRequestType.AssetExifsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
assetId: asset.id,
city: null,
country: null,
dateTimeOriginal: null,
description: '',
exifImageHeight: null,
exifImageWidth: null,
exposureTime: null,
fNumber: null,
fileSizeInByte: null,
focalLength: null,
fps: null,
iso: null,
latitude: null,
lensModel: null,
longitude: null,
make: 'Canon',
model: null,
modifyDate: null,
orientation: null,
profileDescription: null,
projectionType: null,
rating: null,
state: null,
timeZone: null,
},
type: SyncEntityType.AssetExifV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
});
// Exif rows sync only to the asset owner: user2 (owner) receives it, while the
// partner-shared-with user (auth) gets an empty/complete stream.
it('should only sync asset exif for own user', async () => {
const { auth, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
const { session } = await ctx.newSession({ userId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
// 'auth' on purpose: the partner recipient must see nothing to sync
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
});
});

// ───────────────────────────── next test file ─────────────────────────────
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { PersonRepository } from 'src/repositories/person.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Build a fresh sync test context and an authenticated sync user.
// An explicit database handle may be passed; otherwise the suite-wide default is used.
const setup = async (db?: Kysely<DB>) => {
  // '??' rather than '||': only fall back when db is null/undefined
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
// Resolve the shared test database once for all suites in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.AssetFaceV1, () => {
// Verifies the AssetFaceV1 payload carries the face's geometry and linkage fields.
it('should detect and sync the first asset face', async () => {
  const { auth, ctx } = await setup();
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { person } = await ctx.newPerson({ ownerId: auth.user.id });
  const { assetFace } = await ctx.newAssetFace({ assetId: asset.id, personId: person.id });
  const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: assetFace.id,
        assetId: asset.id,
        personId: person.id,
        imageWidth: assetFace.imageWidth,
        imageHeight: assetFace.imageHeight,
        boundingBoxX1: assetFace.boundingBoxX1,
        boundingBoxY1: assetFace.boundingBoxY1,
        boundingBoxX2: assetFace.boundingBoxX2,
        boundingBoxY2: assetFace.boundingBoxY2,
        sourceType: assetFace.sourceType,
      }),
      // enum member instead of the bare 'AssetFaceV1' string literal, for
      // consistency with the rest of the suite (SyncEntityType is a string enum)
      type: SyncEntityType.AssetFaceV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
});
// A deleted face is delivered as AssetFaceDeleteV1 carrying only the face id.
it('should detect and sync a deleted asset face', async () => {
  const { auth, ctx } = await setup();
  const personRepo = ctx.get(PersonRepository);
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { assetFace } = await ctx.newAssetFace({ assetId: asset.id });
  await personRepo.deleteAssetFace(assetFace.id);
  const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        assetFaceId: assetFace.id,
      },
      // enum member instead of the bare 'AssetFaceDeleteV1' string literal
      type: SyncEntityType.AssetFaceDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
});
// Face create/delete events belong to the asset owner (auth2); the unrelated
// user (auth) must see a complete stream with nothing to sync at each step.
it('should not sync an asset face or asset face delete for an unrelated user', async () => {
const { auth, ctx } = await setup();
const personRepo = ctx.get(PersonRepository);
const { user: user2 } = await ctx.newUser();
const { session } = await ctx.newSession({ userId: user2.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetFaceV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
await personRepo.deleteAssetFace(assetFace.id);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetFaceDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
});
});

// ───────────────────────────── next test file ─────────────────────────────
import { Kysely } from 'kysely';
import { AssetMetadataKey, SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Build a fresh sync test context and an authenticated sync user.
// An explicit database handle may be passed; otherwise the suite-wide default is used.
const setup = async (db?: Kysely<DB>) => {
  // '??' rather than '||': only fall back when db is null/undefined
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
// Resolve the shared test database once for all suites in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.AssetMetadataV1, () => {
// New asset metadata (key/value) is delivered as AssetMetadataV1.
it('should detect and sync new asset metadata', async () => {
  const { auth, user, ctx } = await setup();
  const assetRepo = ctx.get(AssetRepository);
  const { asset } = await ctx.newAsset({ ownerId: user.id });
  await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
  const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        key: AssetMetadataKey.MobileApp,
        assetId: asset.id,
        value: { iCloudId: 'abc123' },
      },
      // enum member instead of the bare 'AssetMetadataV1' string literal
      type: SyncEntityType.AssetMetadataV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
});
// Re-upserting the same key with a new value re-emits AssetMetadataV1 with the
// updated payload after the first sync has been acked.
it('should update asset metadata', async () => {
  const { auth, user, ctx } = await setup();
  const assetRepo = ctx.get(AssetRepository);
  const { asset } = await ctx.newAsset({ ownerId: user.id });
  await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
  const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        key: AssetMetadataKey.MobileApp,
        assetId: asset.id,
        value: { iCloudId: 'abc123' },
      },
      // enum member instead of the bare 'AssetMetadataV1' string literal
      type: SyncEntityType.AssetMetadataV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc456' } }]);
  const updatedResponse = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
  expect(updatedResponse).toEqual([
    {
      ack: expect.any(String),
      data: {
        key: AssetMetadataKey.MobileApp,
        assetId: asset.id,
        value: { iCloudId: 'abc456' },
      },
      type: SyncEntityType.AssetMetadataV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, updatedResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
});
});
describe(SyncEntityType.AssetMetadataDeleteV1, () => {
// Deleting a metadata key emits AssetMetadataDeleteV1 with the asset id and key.
it('should delete and sync asset metadata', async () => {
  const { auth, user, ctx } = await setup();
  const assetRepo = ctx.get(AssetRepository);
  const { asset } = await ctx.newAsset({ ownerId: user.id });
  await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
  const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        key: AssetMetadataKey.MobileApp,
        assetId: asset.id,
        value: { iCloudId: 'abc123' },
      },
      // enum member instead of the bare 'AssetMetadataV1' string literal
      type: SyncEntityType.AssetMetadataV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await assetRepo.deleteMetadataByKey(asset.id, AssetMetadataKey.MobileApp);
  // capture the delete stream so it can be acked, matching the sibling tests
  const deleteResponse = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
  expect(deleteResponse).toEqual([
    {
      ack: expect.any(String),
      data: {
        assetId: asset.id,
        key: AssetMetadataKey.MobileApp,
      },
      type: SyncEntityType.AssetMetadataDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, deleteResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
});
});

// ───────────────────────────── next test file ─────────────────────────────
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
let defaultDatabase: Kysely<DB>;
// Build a fresh sync test context and an authenticated sync user.
// An explicit database handle may be passed; otherwise the suite-wide default is used.
const setup = async (db?: Kysely<DB>) => {
  // '??' rather than '||': only fall back when db is null/undefined
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};
// Resolve the shared test database once for all suites in this file.
beforeAll(async () => {
defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.AssetV1, () => {
// Pins the full AssetV1 payload: binary columns (checksum/thumbhash) round-trip
// to base64 strings, and all nullable/derived fields are asserted explicitly.
it('should detect and sync the first asset', async () => {
  const originalFileName = 'firstAsset';
  const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
  const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
  const date = new Date().toISOString();
  const { auth, ctx } = await setup();
  const { asset } = await ctx.newAsset({
    originalFileName,
    ownerId: auth.user.id,
    checksum: Buffer.from(checksum, 'base64'),
    thumbhash: Buffer.from(thumbhash, 'base64'),
    fileCreatedAt: date,
    fileModifiedAt: date,
    localDateTime: date,
    deletedAt: null,
    duration: '0:10:00.00000',
    libraryId: null,
    width: 1920,
    height: 1080,
  });
  const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        id: asset.id,
        originalFileName,
        ownerId: asset.ownerId,
        thumbhash,
        checksum,
        deletedAt: asset.deletedAt,
        fileCreatedAt: asset.fileCreatedAt,
        fileModifiedAt: asset.fileModifiedAt,
        isFavorite: asset.isFavorite,
        localDateTime: asset.localDateTime,
        type: asset.type,
        visibility: asset.visibility,
        duration: asset.duration,
        stackId: null,
        livePhotoVideoId: null,
        libraryId: asset.libraryId,
        width: asset.width,
        height: asset.height,
        isEdited: asset.isEdited,
      },
      // enum member instead of the bare 'AssetV1' string literal, consistent
      // with the rest of the suite (SyncEntityType is a string enum)
      type: SyncEntityType.AssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should detect and sync a deleted asset', async () => {
const { auth, ctx } = await setup();
const assetRepo = ctx.get(AssetRepository);
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
await assetRepo.remove(asset);
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
assetId: asset.id,
},
type: 'AssetDeleteV1',
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
it('should not sync an asset or asset delete for an unrelated user', async () => {
const { auth, ctx } = await setup();
const assetRepo = ctx.get(AssetRepository);
const { user: user2 } = await ctx.newUser();
const { session } = await ctx.newSession({ userId: user2.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const auth2 = factory.auth({ session, user: user2 });
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
await assetRepo.remove(asset);
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
});
});

View file

@ -0,0 +1,106 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a sync test context (optionally on a dedicated database) and
 * provisions an authenticated user with an active sync session.
 */
const setup = async (db?: Kysely<DB>) => {
  const context = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await context.newSyncAuthUser();
  return { auth, user, session, ctx: context };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for AuthUserV1 sync: the stream carries the authenticated
// user's own record (and subsequent changes to it), and nothing about others.
describe(SyncEntityType.AuthUserV1, () => {
  it('should detect and sync the first user', async () => {
    // Use a dedicated database so the only user present is the one under test.
    const { auth, user, ctx } = await setup(await getKyselyDB());
    const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          id: user.id,
          isAdmin: user.isAdmin,
          deletedAt: user.deletedAt,
          name: user.name,
          avatarColor: user.avatarColor,
          email: user.email,
          pinCode: user.pinCode,
          hasProfileImage: false,
          profileChangedAt: (user.profileChangedAt as Date).toISOString(),
          oauthId: user.oauthId,
          quotaSizeInBytes: user.quotaSizeInBytes,
          quotaUsageInBytes: user.quotaUsageInBytes,
          storageLabel: user.storageLabel,
        },
        type: 'AuthUserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AuthUsersV1]);
  });
  it('should sync a change and then another change to that same user', async () => {
    const { auth, user, ctx } = await setup(await getKyselyDB());
    const userRepo = ctx.get(UserRepository);
    const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: user.id,
          isAdmin: false,
        }),
        type: 'AuthUserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // Mutating the user after the ack must produce a fresh AuthUserV1 event.
    await userRepo.update(user.id, { isAdmin: true });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: user.id,
          isAdmin: true,
        }),
        type: 'AuthUserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
  it('should only sync the auth user', async () => {
    const { auth, user, ctx } = await setup(await getKyselyDB());
    // A second, unrelated user must not appear in the auth user's stream.
    await ctx.newUser();
    const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: user.id,
          isAdmin: false,
        }),
        type: 'AuthUserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
});

View file

@ -0,0 +1,60 @@
import { Kysely } from 'kysely';
import { DateTime } from 'luxon';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { SyncCheckpointRepository } from 'src/repositories/sync-checkpoint.repository';
import { DB } from 'src/schema';
import { toAck } from 'src/utils/sync';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
import { v7 } from 'uuid';
// Database handle shared by every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a sync test context (optionally on a dedicated database) and
 * provisions an authenticated user with an active sync session.
 */
const setup = async (db?: Kysely<DB>) => {
  const context = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await context.newSyncAuthUser();
  return { auth, user, session, ctx: context };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for SyncCompleteV1: checkpoint staleness handling. A checkpoint
// whose uuidv7 timestamp is too far in the past triggers a SyncResetV1; a
// recent one lets the stream complete normally.
describe(SyncEntityType.SyncCompleteV1, () => {
  it('should work', async () => {
    const { auth, ctx } = await setup();
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
  });

  it('should detect an old checkpoint and send back a reset', async () => {
    const { auth, session, ctx } = await setup();
    // Backdate the checkpoint 60 days by minting a uuidv7 with an old clock;
    // this is presumably beyond the server's staleness window.
    const staleUpdateId = v7({ msecs: DateTime.now().minus({ days: 60 }).toMillis() });
    const staleCheckpoint = {
      type: SyncEntityType.SyncCompleteV1,
      sessionId: session.id,
      ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId: staleUpdateId }),
    };
    await ctx.get(SyncCheckpointRepository).upsertAll([staleCheckpoint]);
    const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
    // A reset is the ONLY element: no entity data is streamed alongside it.
    expect(response).toEqual([{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' }]);
  });

  it('should not send back a reset if the checkpoint is recent', async () => {
    const { auth, session, ctx } = await setup();
    // Seven days old is within the acceptable window, so no reset is expected.
    const recentUpdateId = v7({ msecs: DateTime.now().minus({ days: 7 }).toMillis() });
    const recentCheckpoint = {
      type: SyncEntityType.SyncCompleteV1,
      sessionId: session.id,
      ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId: recentUpdateId }),
    };
    await ctx.get(SyncCheckpointRepository).upsertAll([recentCheckpoint]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
  });
});

View file

@ -0,0 +1,91 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a sync test context (optionally on a dedicated database) and
 * provisions an authenticated user with an active sync session.
 */
const setup = async (db?: Kysely<DB>) => {
  const context = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await context.newSyncAuthUser();
  return { auth, user, session, ctx: context };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for MemoryToAssetV1 sync: the memory<->asset join rows are
// streamed (and tombstoned) only for the owning user.
describe(SyncEntityType.MemoryToAssetV1, () => {
  it('should detect and sync a memory to asset relation', async () => {
    const { auth, user, ctx } = await setup();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    const { memory } = await ctx.newMemory({ ownerId: user.id });
    await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          memoryId: memory.id,
          assetId: asset.id,
        },
        type: 'MemoryToAssetV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
  });
  it('should detect and sync a deleted memory to asset relation', async () => {
    const { auth, user, ctx } = await setup();
    const memoryRepo = ctx.get(MemoryRepository);
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    const { memory } = await ctx.newMemory({ ownerId: user.id });
    await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
    // Unlinking the asset should surface a MemoryToAssetDeleteV1 tombstone.
    await memoryRepo.removeAssetIds(memory.id, [asset.id]);
    const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          assetId: asset.id,
          memoryId: memory.id,
        },
        type: 'MemoryToAssetDeleteV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
  });
  it('should not sync a memory to asset relation or delete for an unrelated user', async () => {
    const { auth, ctx } = await setup();
    const memoryRepo = ctx.get(MemoryRepository);
    // user2 owns both the memory and the asset; the primary user should see nothing.
    const { auth: auth2, user: user2 } = await ctx.newSyncAuthUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    const { memory } = await ctx.newMemory({ ownerId: user2.id });
    await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
    // The owner receives the relation...
    expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
      expect.objectContaining({ type: SyncEntityType.MemoryToAssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // ...while the unrelated user's stream stays complete throughout.
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
    await memoryRepo.removeAssetIds(memory.id, [asset.id]);
    expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
      expect.objectContaining({ type: SyncEntityType.MemoryToAssetDeleteV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
  });
});

View file

@ -0,0 +1,115 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a sync test context (optionally on a dedicated database) and
 * provisions an authenticated user with an active sync session.
 */
const setup = async (db?: Kysely<DB>) => {
  const context = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await context.newSyncAuthUser();
  return { auth, user, session, ctx: context };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for MemoryV1 sync: creation, update, and deletion of memories
// are streamed to the owner only.
describe(SyncEntityType.MemoryV1, () => {
  it('should detect and sync the first memory with the right properties', async () => {
    const { auth, user: user1, ctx } = await setup();
    const { memory } = await ctx.newMemory({ ownerId: user1.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          id: memory.id,
          // Timestamps are serialized to ISO strings on the wire.
          createdAt: expect.any(String),
          updatedAt: expect.any(String),
          deletedAt: memory.deletedAt,
          type: memory.type,
          data: memory.data,
          hideAt: memory.hideAt,
          showAt: memory.showAt,
          seenAt: memory.seenAt,
          memoryAt: expect.any(String),
          isSaved: memory.isSaved,
          ownerId: memory.ownerId,
        },
        type: 'MemoryV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
  });
  it('should detect and sync a deleted memory', async () => {
    const { auth, user, ctx } = await setup();
    const memoryRepo = ctx.get(MemoryRepository);
    const { memory } = await ctx.newMemory({ ownerId: user.id });
    await memoryRepo.delete(memory.id);
    // A deleted memory surfaces as a MemoryDeleteV1 tombstone with only the id.
    const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          memoryId: memory.id,
        },
        type: 'MemoryDeleteV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
  });
  it('should sync a memory and then an update to that same memory', async () => {
    const { auth, user, ctx } = await setup();
    const memoryRepo = ctx.get(MemoryRepository);
    const { memory } = await ctx.newMemory({ ownerId: user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: memory.id }),
        type: 'MemoryV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // Updating the memory after the ack must re-emit it as MemoryV1.
    await memoryRepo.update(memory.id, { seenAt: new Date() });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: memory.id }),
        type: 'MemoryV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
  });
  it('should not sync a memory or a memory delete for an unrelated user', async () => {
    const { auth, ctx } = await setup();
    const memoryRepo = ctx.get(MemoryRepository);
    // Another user's memory lifecycle must never appear in this user's stream.
    const { user: user2 } = await ctx.newUser();
    const { memory } = await ctx.newMemory({ ownerId: user2.id });
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
    await memoryRepo.delete(memory.id);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
  });
});

View file

@ -0,0 +1,253 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB, wait } from 'test/utils';
// Database handle shared by every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a sync test context (optionally on a dedicated database) and
 * provisions an authenticated user with an active sync session.
 */
const setup = async (db?: Kysely<DB>) => {
  const context = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await context.newSyncAuthUser();
  return { auth, user, session, ctx: context };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for PartnerAssetExifsV1 sync: exif rows of a partner's assets
// are streamed once the partnership exists, including backfill of rows that
// predate the current checkpoint when a second partner is added later.
describe(SyncRequestType.PartnerAssetExifsV1, () => {
  it('should detect and sync the first partner asset exif', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    // Only 'make' was populated; every other exif column comes back as its default.
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          assetId: asset.id,
          city: null,
          country: null,
          dateTimeOriginal: null,
          description: '',
          exifImageHeight: null,
          exifImageWidth: null,
          exposureTime: null,
          fNumber: null,
          fileSizeInByte: null,
          focalLength: null,
          fps: null,
          iso: null,
          latitude: null,
          lensModel: null,
          longitude: null,
          make: 'Canon',
          model: null,
          modifyDate: null,
          orientation: null,
          profileDescription: null,
          projectionType: null,
          rating: null,
          state: null,
          timeZone: null,
        },
        type: SyncEntityType.PartnerAssetExifV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
  });
  it('should not sync partner asset exif for own user', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    // The user's own exif flows through AssetExifsV1, not the partner stream.
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
  });
  it('should not sync partner asset exif for unrelated user', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    // Partnership is with user2; user3's exif must not leak into auth's stream.
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const { asset } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const { session } = await ctx.newSession({ userId: user3.id });
    const authUser3 = factory.auth({ session, user: user3 });
    await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
  });
  it('should backfill partner asset exif when a partner shared their library with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    // user3's exif is older than the checkpoint established via user2 below.
    const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newExif({ assetId: assetUser3.id, make: 'Canon' });
    // Small delay to guarantee distinct update timestamps for checkpoint ordering.
    await wait(2);
    const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: assetUser2.id, make: 'Canon' });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    expect(response).toEqual(
      expect.arrayContaining([
        {
          ack: expect.any(String),
          data: expect.objectContaining({
            assetId: assetUser2.id,
          }),
          type: SyncEntityType.PartnerAssetExifV1,
        },
        expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
      ]),
    );
    await ctx.syncAckAll(auth, response);
    // Adding user3 as a partner afterwards must deliver the older exif as a
    // backfill event followed by a SyncAckV1 marker.
    await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: assetUser3.id,
        }),
        type: SyncEntityType.PartnerAssetExifBackfillV1,
      },
      {
        ack: expect.any(String),
        data: {},
        type: SyncEntityType.SyncAckV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
  });
  it('should handle partners with users ids lower than a uuidv7', async () => {
    const { auth, ctx } = await setup();
    // Fixed uuidv4-style ids that sort below any uuidv7 exercise ack ordering.
    const { user: user2 } = await ctx.newUser({ id: '00d4c0af-7695-4cf2-85e6-415eeaf449cb' });
    const { user: user3 } = await ctx.newUser({ id: '00e4c0af-7695-4cf2-85e6-415eeaf449cb' });
    const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newExif({ assetId: assetUser3.id, make: 'assetUser3' });
    await wait(2);
    const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: assetUser2.id, make: 'assetUser2' });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: assetUser2.id,
        }),
        type: SyncEntityType.PartnerAssetExifV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // This checks that our ack upsert is correct
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
    await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    expect(newResponse).toEqual([
      {
        // Backfill acks are three '|'-separated segments: type|updateId|extra.
        ack: expect.stringMatching(new RegExp(String.raw`${SyncEntityType.PartnerAssetExifBackfillV1}\|.+?\|.+`)),
        data: expect.objectContaining({
          assetId: assetUser3.id,
        }),
        type: SyncEntityType.PartnerAssetExifBackfillV1,
      },
      {
        ack: expect.stringContaining(SyncEntityType.PartnerAssetExifBackfillV1),
        data: {},
        type: SyncEntityType.SyncAckV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
  });
  it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    // Three exif rows in strict time order: user3's first, user2's, user3's second.
    const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newExif({ assetId: assetUser3.id, make: 'assetUser3' });
    await wait(2);
    const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: assetUser2.id, make: 'assetUser2' });
    await wait(2);
    const { asset: asset2User3 } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newExif({ assetId: asset2User3.id, make: 'asset2User3' });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: assetUser2.id,
        }),
        type: SyncEntityType.PartnerAssetExifV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
    // Only the pre-checkpoint row is backfilled; the newer one streams normally.
    expect(newResponse).toEqual([
      {
        ack: expect.stringMatching(new RegExp(String.raw`${SyncEntityType.PartnerAssetExifBackfillV1}\|.+?\|.+`)),
        data: expect.objectContaining({
          assetId: assetUser3.id,
        }),
        type: SyncEntityType.PartnerAssetExifBackfillV1,
      },
      {
        ack: expect.stringContaining(SyncEntityType.PartnerAssetExifBackfillV1),
        data: {},
        type: SyncEntityType.SyncAckV1,
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          assetId: asset2User3.id,
        }),
        type: SyncEntityType.PartnerAssetExifV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
  });
});

View file

@ -0,0 +1,279 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { AssetRepository } from 'src/repositories/asset.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB, wait } from 'test/utils';
// Database handle shared by every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

/**
 * Builds a sync test context (optionally on a dedicated database) and
 * provisions an authenticated user with an active sync session.
 */
const setup = async (db?: Kysely<DB>) => {
  const context = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await context.newSyncAuthUser();
  return { auth, user, session, ctx: context };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
// Medium tests for PartnerAssetsV1 sync: a partner's assets (and deletions)
// are streamed once a partnership exists, with backfill for assets that
// predate the checkpoint when a new partner is added.
describe(SyncRequestType.PartnerAssetsV1, () => {
  it('should detect and sync the first partner asset', async () => {
    const { auth, ctx } = await setup();
    const originalFileName = 'firstPartnerAsset';
    const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
    const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
    const date = new Date().toISOString();
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({
      ownerId: user2.id,
      originalFileName,
      checksum: Buffer.from(checksum, 'base64'),
      thumbhash: Buffer.from(thumbhash, 'base64'),
      fileCreatedAt: date,
      fileModifiedAt: date,
      localDateTime: date,
      deletedAt: null,
      duration: '0:10:00.00000',
      libraryId: null,
    });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
    // width/height were not set on creation, so they come back null.
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          id: asset.id,
          ownerId: asset.ownerId,
          originalFileName,
          thumbhash,
          checksum,
          deletedAt: null,
          fileCreatedAt: date,
          fileModifiedAt: date,
          isFavorite: false,
          localDateTime: date,
          type: asset.type,
          visibility: asset.visibility,
          duration: asset.duration,
          isEdited: asset.isEdited,
          stackId: null,
          livePhotoVideoId: null,
          libraryId: asset.libraryId,
          width: null,
          height: null,
        },
        type: SyncEntityType.PartnerAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should detect and sync a deleted partner asset', async () => {
    const { auth, ctx } = await setup();
    const assetRepo = ctx.get(AssetRepository);
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    await assetRepo.remove(asset);
    // The removal surfaces as a PartnerAssetDeleteV1 tombstone with only the id.
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          assetId: asset.id,
        },
        type: SyncEntityType.PartnerAssetDeleteV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should not sync a deleted partner asset due to a user delete', async () => {
    const { auth, ctx } = await setup();
    const userRepo = ctx.get(UserRepository);
    const { user: user2 } = await ctx.newUser();
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    await ctx.newAsset({ ownerId: user2.id });
    // Hard-deleting the partner user should leave nothing to stream at all.
    await userRepo.delete({ id: user2.id }, true);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
    const { auth, ctx } = await setup();
    const partnerRepo = ctx.get(PartnerRepository);
    const { user: user2 } = await ctx.newUser();
    await ctx.newAsset({ ownerId: user2.id });
    const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.PartnerAssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // Unsharing removes access; no delete tombstones are expected afterwards.
    await partnerRepo.remove(partner);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should not sync an asset or asset delete for own user', async () => {
    const { auth, ctx } = await setup();
    const assetRepo = ctx.get(AssetRepository);
    const { user: user2 } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    // Own assets flow through AssetsV1, never the partner stream.
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
    await assetRepo.remove(asset);
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should not sync an asset or asset delete for unrelated user', async () => {
    const { auth, ctx } = await setup();
    const assetRepo = ctx.get(AssetRepository);
    // user2 has no partnership with auth; their asset stays invisible to auth.
    const { user: user2 } = await ctx.newUser();
    const { session } = await ctx.newSession({ userId: user2.id });
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    const auth2 = factory.auth({ session, user: user2 });
    await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
    await assetRepo.remove(asset);
    await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should backfill partner assets when a partner shared their library with you', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    // user3's asset is older than the checkpoint established via user2 below.
    const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
    // Small delay to guarantee distinct update timestamps for checkpoint ordering.
    await wait(2);
    const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: assetUser2.id,
        }),
        type: SyncEntityType.PartnerAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    // Adding user3 as a partner delivers the older asset as a backfill event
    // followed by a SyncAckV1 marker.
    await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: assetUser3.id,
        }),
        type: SyncEntityType.PartnerAssetBackfillV1,
      },
      {
        ack: expect.stringContaining(SyncEntityType.PartnerAssetBackfillV1),
        data: {},
        type: SyncEntityType.SyncAckV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
  it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    // Three assets in strict time order: user3's first, user2's, user3's second.
    const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
    await wait(2);
    const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
    await wait(2);
    const { asset: asset2User3 } = await ctx.newAsset({ ownerId: user3.id });
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: assetUser2.id,
        }),
        type: SyncEntityType.PartnerAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
    // Only the pre-checkpoint asset is backfilled; the newer one streams normally.
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: assetUser3.id,
        }),
        type: SyncEntityType.PartnerAssetBackfillV1,
      },
      {
        ack: expect.stringContaining(SyncEntityType.PartnerAssetBackfillV1),
        data: {},
        type: SyncEntityType.SyncAckV1,
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: asset2User3.id,
        }),
        type: SyncEntityType.PartnerAssetV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
  });
});

View file

@ -0,0 +1,247 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { StackRepository } from 'src/repositories/stack.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB, wait } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncRequestType.PartnerStacksV1, () => {
it('should detect and sync the first partner stack', async () => {
const { auth, user, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toEqual([
{
ack: expect.any(String),
data: {
id: stack.id,
ownerId: stack.ownerId,
createdAt: (stack.createdAt as Date).toISOString(),
updatedAt: (stack.updatedAt as Date).toISOString(),
primaryAssetId: stack.primaryAssetId,
},
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should detect and sync a deleted partner stack', async () => {
const { auth, user, ctx } = await setup();
const stackRepo = ctx.get(StackRepository);
const { user: user2 } = await ctx.newUser();
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
await stackRepo.delete(stack.id);
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toEqual([
{
ack: expect.stringContaining('PartnerStackDeleteV1'),
data: {
stackId: stack.id,
},
type: SyncEntityType.PartnerStackDeleteV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a deleted partner stack due to a user delete', async () => {
const { auth, user, ctx } = await setup();
const userRepo = ctx.get(UserRepository);
const { user: user2 } = await ctx.newUser();
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
await userRepo.delete({ id: user2.id }, true);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a deleted partner stack due to a partner delete (unshare)', async () => {
const { auth, user, ctx } = await setup();
const partnerRepo = ctx.get(PartnerRepository);
const { user: user2 } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user2.id });
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.PartnerStackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await partnerRepo.remove(partner);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a stack or stack delete for own user', async () => {
const { auth, user, ctx } = await setup();
const stackRepo = ctx.get(StackRepository);
const { user: user2 } = await ctx.newUser();
const { asset } = await ctx.newAsset({ ownerId: user.id });
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset.id]);
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should not sync a stack or stack delete for unrelated user', async () => {
const { auth, ctx } = await setup();
const stackRepo = ctx.get(StackRepository);
const { user: user2 } = await ctx.newUser();
const { session } = await ctx.newSession({ userId: user2.id });
const { asset } = await ctx.newAsset({ ownerId: user2.id });
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
const auth2 = factory.auth({ session, user: user2 });
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
await stackRepo.delete(stack.id);
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should backfill partner stacks when a partner shared their library with you', async () => {
const { auth, user, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
const { user: user3 } = await ctx.newUser();
const { asset: asset3 } = await ctx.newAsset({ ownerId: user3.id });
const { stack: stack3 } = await ctx.newStack({ ownerId: user3.id }, [asset3.id]);
await wait(2);
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
const { stack: stack2 } = await ctx.newStack({ ownerId: user2.id }, [asset2.id]);
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toEqual([
{
ack: expect.stringContaining('PartnerStackV1'),
data: expect.objectContaining({
id: stack2.id,
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(newResponse).toEqual([
{
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
data: expect.objectContaining({
id: stack3.id,
}),
type: SyncEntityType.PartnerStackBackfillV1,
},
{
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
it('should only backfill partner stacks created prior to the current partner stack checkpoint', async () => {
const { auth, ctx } = await setup();
const { user: user2 } = await ctx.newUser();
const { user: user3 } = await ctx.newUser();
const { asset: asset3 } = await ctx.newAsset({ ownerId: user3.id });
const { stack: stack3 } = await ctx.newStack({ ownerId: user3.id }, [asset3.id]);
await wait(2);
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
const { stack: stack2 } = await ctx.newStack({ ownerId: user2.id }, [asset2.id]);
await wait(2);
const { asset: asset4 } = await ctx.newAsset({ ownerId: user3.id });
const { stack: stack4 } = await ctx.newStack({ ownerId: user3.id }, [asset4.id]);
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(response).toEqual([
{
ack: expect.stringContaining(SyncEntityType.PartnerStackV1),
data: expect.objectContaining({
id: stack2.id,
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, response);
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
expect(newResponse).toEqual([
{
ack: expect.any(String),
data: expect.objectContaining({
id: stack3.id,
}),
type: SyncEntityType.PartnerStackBackfillV1,
},
{
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
data: {},
type: SyncEntityType.SyncAckV1,
},
{
ack: expect.any(String),
data: expect.objectContaining({
id: stack4.id,
}),
type: SyncEntityType.PartnerStackV1,
},
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
]);
await ctx.syncAckAll(auth, newResponse);
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
});
});

View file

@ -0,0 +1,178 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.PartnerV1, () => {
  // A new partner link shared WITH the auth user is synced with its payload.
  it('should detect and sync the first partner', async () => {
    const { auth, user: user1, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          inTimeline: partner.inTimeline,
          sharedById: partner.sharedById,
          sharedWithId: partner.sharedWithId,
        },
        // Use the enum member instead of the raw string for consistency with the suite.
        type: SyncEntityType.PartnerV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
  });
  // Removing a partner link produces a PartnerDeleteV1 tombstone.
  it('should detect and sync a deleted partner', async () => {
    const { auth, user: user1, ctx } = await setup();
    const partnerRepo = ctx.get(PartnerRepository);
    const { user: user2 } = await ctx.newUser();
    const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
    await partnerRepo.remove(partner);
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          sharedById: partner.sharedById,
          sharedWithId: partner.sharedWithId,
        },
        type: SyncEntityType.PartnerDeleteV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
  });
  // Both directions of a mutual share are synced as separate PartnerV1 entries.
  it('should detect and sync a partner share both to and from another user', async () => {
    const { auth, user: user1, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    const { partner: partner1 } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
    const { partner: partner2 } = await ctx.newPartner({ sharedById: user1.id, sharedWithId: user2.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          inTimeline: partner1.inTimeline,
          sharedById: partner1.sharedById,
          sharedWithId: partner1.sharedWithId,
        },
        type: SyncEntityType.PartnerV1,
      },
      {
        ack: expect.any(String),
        data: {
          inTimeline: partner2.inTimeline,
          sharedById: partner2.sharedById,
          sharedWithId: partner2.sharedWithId,
        },
        type: SyncEntityType.PartnerV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
  });
  // Updating an already-acked partner re-emits it with the new values.
  it('should sync a partner and then an update to that same partner', async () => {
    const { auth, user: user1, ctx } = await setup();
    const partnerRepo = ctx.get(PartnerRepository);
    const { user: user2 } = await ctx.newUser();
    const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          inTimeline: partner.inTimeline,
          sharedById: partner.sharedById,
          sharedWithId: partner.sharedWithId,
        },
        type: SyncEntityType.PartnerV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    const updated = await partnerRepo.update(
      { sharedById: partner.sharedById, sharedWithId: partner.sharedWithId },
      { inTimeline: true },
    );
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: {
          inTimeline: updated.inTimeline,
          sharedById: updated.sharedById,
          sharedWithId: updated.sharedWithId,
        },
        type: SyncEntityType.PartnerV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
  });
  // Partner links between two other users never reach this user's stream.
  it('should not sync a partner or partner delete for an unrelated user', async () => {
    const { auth, ctx } = await setup();
    const partnerRepo = ctx.get(PartnerRepository);
    const { user: user2 } = await ctx.newUser();
    const { user: user3 } = await ctx.newUser();
    const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user3.id });
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
    await partnerRepo.remove(partner);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
  });
  // A hard user delete cascades the partner link without emitting a tombstone.
  it('should not sync a partner delete after a user is deleted', async () => {
    const { auth, ctx } = await setup();
    const userRepo = ctx.get(UserRepository);
    const { user: user2 } = await ctx.newUser();
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    await userRepo.delete({ id: user2.id }, true);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
  });
});

View file

@ -0,0 +1,93 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { PersonRepository } from 'src/repositories/person.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { factory } from 'test/small.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.PersonV1, () => {
  // A person owned by the auth user is synced with its full payload.
  it('should detect and sync the first person', async () => {
    const { auth, ctx } = await setup();
    const { person } = await ctx.newPerson({ ownerId: auth.user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({
          id: person.id,
          name: person.name,
          isHidden: person.isHidden,
          birthDate: person.birthDate,
          faceAssetId: person.faceAssetId,
          isFavorite: person.isFavorite,
          ownerId: auth.user.id,
          color: person.color,
        }),
        // Use the enum member instead of the raw string for consistency with the suite.
        type: SyncEntityType.PersonV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
  });
  // Deleting a person produces a PersonDeleteV1 tombstone.
  it('should detect and sync a deleted person', async () => {
    const { auth, ctx } = await setup();
    const personRepo = ctx.get(PersonRepository);
    const { person } = await ctx.newPerson({ ownerId: auth.user.id });
    await personRepo.delete([person.id]);
    const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          personId: person.id,
        },
        type: SyncEntityType.PersonDeleteV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
  });
  // Another user's people (and their deletions) never reach this user's stream.
  it('should not sync a person or person delete for an unrelated user', async () => {
    const { auth, ctx } = await setup();
    const personRepo = ctx.get(PersonRepository);
    const { user: user2 } = await ctx.newUser();
    const { session } = await ctx.newSession({ userId: user2.id });
    const { person } = await ctx.newPerson({ ownerId: user2.id });
    const auth2 = factory.auth({ session, user: user2 });
    expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
      expect.objectContaining({ type: SyncEntityType.PersonV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
    await personRepo.delete([person.id]);
    expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
      expect.objectContaining({ type: SyncEntityType.PersonDeleteV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
  });
});

View file

@ -0,0 +1,94 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { SessionRepository } from 'src/repositories/session.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.SyncResetV1, () => {
  // Sanity check: a fresh session with nothing to sync reports complete.
  it('should work', async () => {
    const { auth, ctx } = await setup();
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
  });
  // When the session is flagged for reset, the stream contains only the reset marker.
  it('should detect a pending sync reset', async () => {
    const { auth, ctx } = await setup();
    await ctx.get(SessionRepository).update(auth.session!.id, {
      isPendingSyncReset: true,
    });
    const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
    expect(response).toEqual([{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' }]);
  });
  // A pending reset suppresses all other DTOs until the client handles it.
  it('should not send other dtos when a reset is pending', async () => {
    const { auth, user, ctx } = await setup();
    await ctx.newAsset({ ownerId: user.id });
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.get(SessionRepository).update(auth.session!.id, {
      isPendingSyncReset: true,
    });
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
      { type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' },
    ]);
  });
  // Passing the reset flag (third argument) to syncStream clears the pending reset
  // and resumes a normal stream in the same request.
  it('should allow resetting a pending reset when requesting changes ', async () => {
    const { auth, user, ctx } = await setup();
    await ctx.newAsset({ ownerId: user.id });
    await ctx.get(SessionRepository).update(auth.session!.id, {
      isPendingSyncReset: true,
    });
    await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1], true)).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
  // Acking a reset wipes prior checkpoints, so already-synced entities stream again.
  it('should reset the sync progress', async () => {
    const { auth, user, ctx } = await setup();
    await ctx.newAsset({ ownerId: user.id });
    const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
    await ctx.syncAckAll(auth, response);
    await ctx.get(SessionRepository).update(auth.session!.id, {
      isPendingSyncReset: true,
    });
    const resetResponse = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
    await ctx.syncAckAll(auth, resetResponse);
    const postResetResponse = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
    expect(postResetResponse).toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
});

View file

@ -0,0 +1,114 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { StackRepository } from 'src/repositories/stack.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.StackV1, () => {
  // A stack owned by the auth user is synced with its full payload.
  it('should detect and sync the first stack', async () => {
    const { auth, user, ctx } = await setup();
    const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
    const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
    const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
    const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
    expect(response).toEqual([
      {
        // Use the enum member instead of the raw string for consistency with the suite.
        ack: expect.stringContaining(SyncEntityType.StackV1),
        data: {
          id: stack.id,
          createdAt: (stack.createdAt as Date).toISOString(),
          updatedAt: (stack.updatedAt as Date).toISOString(),
          primaryAssetId: stack.primaryAssetId,
          ownerId: stack.ownerId,
        },
        type: SyncEntityType.StackV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
  });
  // Deleting a stack produces a StackDeleteV1 tombstone.
  it('should detect and sync a deleted stack', async () => {
    const { auth, user, ctx } = await setup();
    const stackRepo = ctx.get(StackRepository);
    const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
    const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
    const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
    await stackRepo.delete(stack.id);
    const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
    expect(response).toEqual([
      {
        ack: expect.stringContaining(SyncEntityType.StackDeleteV1),
        data: { stackId: stack.id },
        type: SyncEntityType.StackDeleteV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
  });
  // Updating an already-acked stack re-emits it with the new primary asset.
  it('should sync a stack and then an update to that same stack', async () => {
    const { auth, user, ctx } = await setup();
    const stackRepo = ctx.get(StackRepository);
    const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
    const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
    const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
    const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
    expect(response).toEqual([
      expect.objectContaining({ type: SyncEntityType.StackV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await stackRepo.update(stack.id, { primaryAssetId: asset2.id });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
    // A single precise assertion; a previous weaker objectContaining-only check on
    // newResponse was redundant (fully subsumed by this one) and has been removed.
    expect(newResponse).toEqual([
      {
        ack: expect.stringContaining(SyncEntityType.StackV1),
        data: expect.objectContaining({ id: stack.id, primaryAssetId: asset2.id }),
        type: SyncEntityType.StackV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
  });
  // Another user's stacks (and their deletions) never reach this user's stream.
  it('should not sync a stack or stack delete for an unrelated user', async () => {
    const { auth, ctx } = await setup();
    const stackRepo = ctx.get(StackRepository);
    const { user: user2 } = await ctx.newUser();
    const { asset: asset1 } = await ctx.newAsset({ ownerId: user2.id });
    const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
    const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset1.id, asset2.id]);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
    await stackRepo.delete(stack.id);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
  });
});

View file

@ -0,0 +1,19 @@
import { SyncRequestType } from 'src/enum';
import { SYNC_TYPES_ORDER } from 'src/services/sync.service';
describe('types', () => {
  // SYNC_TYPES_ORDER must be an exact permutation of the SyncRequestType keys.
  it('should have all the types in the ordering variable', () => {
    for (const key of Object.keys(SyncRequestType)) {
      // jest-style matcher (toContain) for consistency with the rest of the suite,
      // replacing the chai-style `expect(...).includes(...)` call.
      expect(SYNC_TYPES_ORDER).toContain(key);
    }
    // Equal length + full containment above => exact one-to-one correspondence.
    expect(SYNC_TYPES_ORDER.length).toBe(Object.keys(SyncRequestType).length);
  });
  // Album metadata must be processed after album assets so references resolve.
  it('should ensure album follows albums assets', () => {
    const albumIndex = SYNC_TYPES_ORDER.indexOf(SyncRequestType.AlbumsV1);
    const albumAssetsIndex = SYNC_TYPES_ORDER.indexOf(SyncRequestType.AlbumAssetsV1);
    expect(albumIndex).toBeGreaterThan(albumAssetsIndex);
  });
});

View file

@ -0,0 +1,125 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType, UserMetadataKey } from 'src/enum';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.UserMetadataV1, () => {
  // Newly upserted user metadata is synced with its key, owner, and value.
  it('should detect and sync new user metadata', async () => {
    const { auth, user, ctx } = await setup();
    const userRepo = ctx.get(UserRepository);
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
    const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: true },
        },
        // Use the enum member instead of the raw string for consistency with the suite.
        type: SyncEntityType.UserMetadataV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
  });
  // Upserting the same key again re-emits the entry with the updated value.
  it('should update user metadata', async () => {
    const { auth, user, ctx } = await setup();
    const userRepo = ctx.get(UserRepository);
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
    const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: true },
        },
        type: SyncEntityType.UserMetadataV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: false } });
    const updatedResponse = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(updatedResponse).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: false },
        },
        type: SyncEntityType.UserMetadataV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, updatedResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
  });
});
describe(SyncEntityType.UserMetadataDeleteV1, () => {
  // Deleting a metadata key produces a UserMetadataDeleteV1 tombstone.
  it('should delete and sync user metadata', async () => {
    const { auth, user, ctx } = await setup();
    const userRepo = ctx.get(UserRepository);
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
    const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: true },
        },
        // Use the enum member instead of the raw string for consistency with the suite.
        type: SyncEntityType.UserMetadataV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await userRepo.deleteMetadata(auth.user.id, UserMetadataKey.Onboarding);
    const deleteResponse = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(deleteResponse).toEqual([
      {
        ack: expect.any(String),
        data: {
          userId: user.id,
          key: UserMetadataKey.Onboarding,
        },
        type: SyncEntityType.UserMetadataDeleteV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // Ack the tombstone and confirm the checkpoint advances, matching the pattern
    // used by the sibling tests in this file.
    await ctx.syncAckAll(auth, deleteResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
  });
});

View file

@ -0,0 +1,137 @@
import { Kysely } from 'kysely';
import { SyncEntityType, SyncRequestType } from 'src/enum';
import { UserRepository } from 'src/repositories/user.repository';
import { DB } from 'src/schema';
import { SyncTestContext } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';
// Database handle shared by every test in this suite; initialised once in beforeAll.
let defaultDatabase: Kysely<DB>;

/** Build a sync test context (optionally on a dedicated database) plus an authenticated sync user and session. */
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db ?? defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
describe(SyncEntityType.UserV1, () => {
  // The auth user itself is the first UserV1 entity in a fresh database.
  // These tests use a dedicated database (getKyselyDB()) to control the user set.
  it('should detect and sync the first user', async () => {
    const { auth, ctx } = await setup(await getKyselyDB());
    const userRepo = ctx.get(UserRepository);
    const user = await userRepo.get(auth.user.id, { withDeleted: false });
    if (!user) {
      expect.fail('First user should exist');
    }
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          deletedAt: user.deletedAt,
          email: user.email,
          hasProfileImage: user.profileImagePath !== '',
          id: user.id,
          name: user.name,
          avatarColor: user.avatarColor,
          profileChangedAt: user.profileChangedAt.toISOString(),
        },
        // Use the enum member instead of the raw string for consistency with the suite.
        type: SyncEntityType.UserV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });
  // Soft-deleted users still stream as UserV1 (with deletedAt set), not as deletes.
  it('should detect and sync a soft deleted user', async () => {
    const { auth, ctx } = await setup(await getKyselyDB());
    const { user: deleted } = await ctx.newUser({ deletedAt: new Date().toISOString() });
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual(
      expect.arrayContaining([
        {
          ack: expect.any(String),
          data: expect.objectContaining({ id: auth.user.id }),
          type: SyncEntityType.UserV1,
        },
        {
          ack: expect.any(String),
          data: expect.objectContaining({ id: deleted.id }),
          type: SyncEntityType.UserV1,
        },
        expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
      ]),
    );
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });
  // Hard-deleted users produce a UserDeleteV1 tombstone ahead of the user list.
  it('should detect and sync a deleted user', async () => {
    const { auth, user: authUser, ctx } = await setup(await getKyselyDB());
    const userRepo = ctx.get(UserRepository);
    const { user } = await ctx.newUser();
    await userRepo.delete({ id: user.id }, true);
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          userId: user.id,
        },
        type: SyncEntityType.UserDeleteV1,
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: authUser.id }),
        type: SyncEntityType.UserV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });
  // Updating an already-acked user re-emits it with the new values.
  it('should sync a user and then an update to that same user', async () => {
    const { auth, user, ctx } = await setup(await getKyselyDB());
    const userRepo = ctx.get(UserRepository);
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: user.id }),
        type: SyncEntityType.UserV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    await ctx.syncAckAll(auth, response);
    const updated = await userRepo.update(auth.user.id, { name: 'new name' });
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: user.id, name: updated.name }),
        type: SyncEntityType.UserV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // Ack the update and confirm the checkpoint advances, matching the pattern
    // used by the sibling tests in this file.
    await ctx.syncAckAll(auth, newResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });
});

View file

@ -0,0 +1,73 @@
import { AccessRepository } from 'src/repositories/access.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
// AccessRepository exposes grouped checker objects (activity, asset, album, …);
// map each group through RepositoryInterface, then wrap every group in Mocked
// so tests can stub/inspect the individual check functions.
type IAccessRepository = { [K in keyof AccessRepository]: RepositoryInterface<AccessRepository[K]> };
export type IAccessRepositoryMock = {
  [K in keyof IAccessRepository]: Mocked<IAccessRepository[K]>;
};
/**
 * Builds a fully mocked AccessRepository: every access check resolves to an
 * empty Set (i.e. "nothing granted") until a test overrides it.
 */
export const newAccessRepositoryMock = (): IAccessRepositoryMock => {
  // Each call creates a fresh mock so per-test overrides never leak between keys.
  const grantsNothing = () => vitest.fn().mockResolvedValue(new Set());
  return {
    activity: {
      checkOwnerAccess: grantsNothing(),
      checkAlbumOwnerAccess: grantsNothing(),
      checkCreateAccess: grantsNothing(),
    },
    asset: {
      checkOwnerAccess: grantsNothing(),
      checkAlbumAccess: grantsNothing(),
      checkPartnerAccess: grantsNothing(),
      checkSharedLinkAccess: grantsNothing(),
    },
    album: {
      checkOwnerAccess: grantsNothing(),
      checkSharedAlbumAccess: grantsNothing(),
      checkSharedLinkAccess: grantsNothing(),
    },
    authDevice: {
      checkOwnerAccess: grantsNothing(),
    },
    memory: {
      checkOwnerAccess: grantsNothing(),
    },
    notification: {
      checkOwnerAccess: grantsNothing(),
    },
    person: {
      checkFaceOwnerAccess: grantsNothing(),
      checkOwnerAccess: grantsNothing(),
    },
    partner: {
      checkUpdateAccess: grantsNothing(),
    },
    session: {
      checkOwnerAccess: grantsNothing(),
    },
    stack: {
      checkOwnerAccess: grantsNothing(),
    },
    timeline: {
      checkPartnerAccess: grantsNothing(),
    },
    tag: {
      checkOwnerAccess: grantsNothing(),
    },
    workflow: {
      checkOwnerAccess: grantsNothing(),
    },
  };
};

View file

@ -0,0 +1,57 @@
import { AssetRepository } from 'src/repositories/asset.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/**
 * Builds a fully mocked AssetRepository.
 *
 * Every method is a bare vitest.fn(); the few methods whose callers iterate
 * the result immediately are pre-configured to resolve to an empty array.
 */
export const newAssetRepositoryMock = (): Mocked<RepositoryInterface<AssetRepository>> => {
  return {
    create: vitest.fn(),
    createAll: vitest.fn(),
    upsertExif: vitest.fn(),
    updateAllExif: vitest.fn(),
    updateDateTimeOriginal: vitest.fn().mockResolvedValue([]), // list-returning: default to empty
    unlockProperties: vitest.fn().mockResolvedValue([]),
    upsertJobStatus: vitest.fn(),
    getForCopy: vitest.fn(),
    getByDayOfYear: vitest.fn(),
    getByIds: vitest.fn().mockResolvedValue([]),
    getByIdsWithAllRelationsButStacks: vitest.fn().mockResolvedValue([]),
    getByDeviceIds: vitest.fn(),
    getById: vitest.fn(),
    getByChecksum: vitest.fn(),
    getByChecksums: vitest.fn(),
    getUploadAssetIdByChecksum: vitest.fn(),
    getRandom: vitest.fn(),
    getAllByDeviceId: vitest.fn(),
    getLivePhotoCount: vitest.fn(),
    getLibraryAssetCount: vitest.fn(),
    updateAll: vitest.fn(),
    getByLibraryIdAndOriginalPath: vitest.fn(),
    deleteAll: vitest.fn(),
    update: vitest.fn(),
    remove: vitest.fn(),
    findLivePhotoMatch: vitest.fn(),
    getStatistics: vitest.fn(),
    getTimeBucket: vitest.fn(),
    getTimeBuckets: vitest.fn(),
    getAssetIdByCity: vitest.fn(),
    getAllForUserFullSync: vitest.fn(),
    getChangedDeltaSync: vitest.fn(),
    upsertFile: vitest.fn(),
    upsertFiles: vitest.fn(),
    deleteFile: vitest.fn(),
    deleteFiles: vitest.fn(),
    detectOfflineExternalAssets: vitest.fn(),
    filterNewExternalAssetPaths: vitest.fn(),
    updateByLibraryId: vitest.fn(),
    getFileSamples: vitest.fn(),
    getMetadata: vitest.fn(),
    getMetadataByKey: vitest.fn(),
    upsertMetadata: vitest.fn(),
    upsertBulkMetadata: vitest.fn(),
    deleteMetadataByKey: vitest.fn(),
    deleteBulkMetadata: vitest.fn(),
    getForOriginal: vitest.fn(),
    getForThumbnail: vitest.fn(),
    getForVideo: vitest.fn(),
  };
};

View file

@ -0,0 +1,112 @@
import { DatabaseExtension, ImmichEnvironment, ImmichWorker, LogFormat } from 'src/enum';
import { ConfigRepository, EnvData } from 'src/repositories/config.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
// Baseline EnvData for tests: a production-like environment with default
// ports, local Postgres/Redis hostnames, and all telemetry/metrics disabled.
const envData: EnvData = {
  port: 2283,
  environment: ImmichEnvironment.Production,
  logFormat: LogFormat.Console,
  buildMetadata: {},
  // BullMQ queue configuration (single sample queue).
  bull: {
    config: {
      connection: {},
      prefix: 'immich_bull',
    },
    queues: [{ name: 'queue-1' }],
  },
  cls: {
    config: {},
  },
  // Docker-compose style database connection, migrations enabled.
  database: {
    config: {
      connectionType: 'parts',
      database: 'immich',
      host: 'database',
      port: 5432,
      username: 'postgres',
      password: 'postgres',
    },
    skipMigrations: false,
    vectorExtension: DatabaseExtension.Vectors,
  },
  licensePublicKey: {
    client: 'client-public-key',
    server: 'server-public-key',
  },
  network: {
    trustedProxies: [],
  },
  // OpenTelemetry fully disabled for tests.
  otel: {
    metrics: {
      hostMetrics: false,
      apiMetrics: {
        enable: false,
        ignoreRoutes: [],
      },
    },
  },
  redis: {
    host: 'redis',
    port: 6379,
    db: 0,
  },
  // Build-time asset locations baked into the server image.
  resourcePaths: {
    lockFile: 'build-lock.json',
    geodata: {
      dateFile: '/build/geodata/geodata-date.txt',
      admin1: '/build/geodata/admin1CodesASCII.txt',
      admin2: '/build/geodata/admin2Codes.txt',
      cities500: '/build/geodata/cities500.txt',
      naturalEarthCountriesPath: 'build/ne_10m_admin_0_countries.geojson',
    },
    web: {
      root: '/build/www',
      indexHtml: '/build/www/index.html',
    },
    corePlugin: '/build/corePlugin',
  },
  setup: {
    allow: true,
  },
  storage: {
    ignoreMountCheckErrors: false,
  },
  telemetry: {
    apiPort: 8081,
    microservicesPort: 8082,
    metrics: new Set(),
  },
  workers: [ImmichWorker.Api, ImmichWorker.Microservices],
  plugins: {
    external: {
      allow: true,
      installFolder: '/app/data/plugins',
    },
  },
  noColor: false,
};
// Shallow merge only: an override replaces an entire top-level section
// (e.g. passing { redis: { host: 'x' } } drops the default port/db).
export const mockEnvData = (config: Partial<EnvData>) => ({ ...envData, ...config });
/** ConfigRepository mock: production-like env data, Api worker, non-dev mode. */
export const newConfigRepositoryMock = (): Mocked<RepositoryInterface<ConfigRepository>> => ({
  getEnv: vitest.fn().mockReturnValue(mockEnvData({})),
  getWorker: vitest.fn().mockReturnValue(ImmichWorker.Api),
  isDev: vitest.fn().mockReturnValue(false),
});

View file

@ -0,0 +1,19 @@
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/**
 * CryptoRepository mock with deterministic fakes: hash functions append a
 * "(hashed)" marker to their input, and all compare/verify calls succeed.
 */
export const newCryptoRepositoryMock = (): Mocked<RepositoryInterface<CryptoRepository>> => ({
  randomUUID: vitest.fn().mockReturnValue('random-uuid'),
  randomBytes: vitest.fn().mockReturnValue(Buffer.from('random-bytes', 'utf8')),
  compareBcrypt: vitest.fn().mockReturnValue(true),
  hashBcrypt: vitest.fn().mockImplementation((input) => Promise.resolve(`${input} (hashed)`)),
  hashSha256: vitest.fn().mockImplementation((input) => `${input} (hashed)`),
  verifySha256: vitest.fn().mockImplementation(() => true),
  hashSha1: vitest.fn().mockImplementation((input) => Buffer.from(`${input.toString()} (hashed)`)),
  hashFile: vitest.fn().mockImplementation((input) => `${input} (file-hashed)`),
  randomBytesAsText: vitest.fn().mockReturnValue(Buffer.from('random-bytes').toString('base64')),
  signJwt: vitest.fn().mockReturnValue('mock-jwt-token'),
  verifyJwt: vitest.fn().mockImplementation((token) => ({ verified: true, token })),
});

View file

@ -0,0 +1,29 @@
import { DatabaseRepository } from 'src/repositories/database.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/**
 * DatabaseRepository mock. Version queries answer with a realistic Postgres 14
 * response, and `withLock` simply invokes the supplied callback (no locking).
 */
export const newDatabaseRepositoryMock = (): Mocked<RepositoryInterface<DatabaseRepository>> => ({
  shutdown: vitest.fn(),
  getExtensionVersions: vitest.fn(),
  getVectorExtension: vitest.fn(),
  getExtensionVersionRange: vitest.fn(),
  getPostgresVersion: vitest.fn().mockResolvedValue('14.10 (Debian 14.10-1.pgdg120+1)'),
  getPostgresVersionRange: vitest.fn().mockReturnValue('>=14.0.0'),
  createExtension: vitest.fn().mockResolvedValue(void 0),
  dropExtension: vitest.fn(),
  updateVectorExtension: vitest.fn(),
  reindexVectorsIfNeeded: vitest.fn(),
  getDimensionSize: vitest.fn(),
  setDimensionSize: vitest.fn(),
  deleteAllSearchEmbeddings: vitest.fn(),
  prewarm: vitest.fn(),
  runMigrations: vitest.fn(),
  revertLastMigration: vitest.fn(),
  // Pass-through: run the critical section directly, bypassing the real lock.
  withLock: vitest.fn().mockImplementation((_, function_: <R>() => Promise<R>) => function_()),
  tryLock: vitest.fn(),
  isBusy: vitest.fn(),
  wait: vitest.fn(),
  migrateFilePaths: vitest.fn(),
});

View file

@ -0,0 +1,24 @@
import { JobRepository } from 'src/repositories/job.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/**
 * JobRepository mock. `queue`/`queueAll` resolve immediately so services can
 * await them; everything else is a plain spy.
 */
export const newJobRepositoryMock = (): Mocked<RepositoryInterface<JobRepository>> => ({
  setup: vitest.fn(),
  startWorkers: vitest.fn(),
  run: vitest.fn(),
  setConcurrency: vitest.fn(),
  empty: vitest.fn(),
  pause: vitest.fn(),
  resume: vitest.fn(),
  searchJobs: vitest.fn(),
  queue: vitest.fn().mockImplementation(() => Promise.resolve()),
  queueAll: vitest.fn().mockImplementation(() => Promise.resolve()),
  isActive: vitest.fn(),
  isPaused: vitest.fn(),
  getJobCounts: vitest.fn(),
  clear: vitest.fn(),
  waitForQueueCompletion: vitest.fn(),
  removeJob: vitest.fn(),
});

View file

@ -0,0 +1,17 @@
import { MediaRepository } from 'src/repositories/media.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/**
 * MediaRepository mock. Generation/write operations resolve to harmless
 * defaults (empty buffers, null extraction); probe/transcode are bare spies.
 */
export const newMediaRepositoryMock = (): Mocked<RepositoryInterface<MediaRepository>> => ({
  generateThumbnail: vitest.fn().mockImplementation(() => Promise.resolve()),
  writeExif: vitest.fn().mockImplementation(() => Promise.resolve()),
  copyTagGroup: vitest.fn().mockImplementation(() => Promise.resolve()),
  generateThumbhash: vitest.fn().mockResolvedValue(Buffer.from('')),
  decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {} }),
  extract: vitest.fn().mockResolvedValue(null),
  probe: vitest.fn(),
  transcode: vitest.fn(),
  getImageDimensions: vitest.fn(),
});

View file

@ -0,0 +1,13 @@
import { MetadataRepository } from 'src/repositories/metadata.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/** MetadataRepository mock: every exiftool-backed operation is a bare spy. */
export const newMetadataRepositoryMock = (): Mocked<RepositoryInterface<MetadataRepository>> => ({
  setMaxConcurrency: vitest.fn(),
  teardown: vitest.fn(),
  readTags: vitest.fn(),
  writeTags: vitest.fn(),
  extractBinaryTag: vitest.fn(),
});

View file

@ -0,0 +1,78 @@
import { ChokidarOptions } from 'chokidar';
import { StorageCore } from 'src/cores/storage.core';
import { StorageRepository, WatchEvents } from 'src/repositories/storage.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
// Script for the fake watcher: `items` are replayed in order as filesystem
// events; `close` (optional) becomes the watcher's cleanup function.
interface MockWatcherOptions {
  items?: Array<{ event: 'change' | 'add' | 'unlink' | 'error'; value: string }>;
  close?: () => Promise<void>;
}
/**
 * Creates a chokidar-compatible watch stub. It immediately reports "ready",
 * replays the scripted `items` through the matching event callbacks, and
 * returns a cleanup function (the provided `close`, or a resolved no-op).
 */
export const makeMockWatcher =
  ({ items, close }: MockWatcherOptions) =>
  (paths: string[], options: ChokidarOptions, events: Partial<WatchEvents>) => {
    events.onReady?.();
    for (const { event, value } of items ?? []) {
      if (event === 'add') {
        events.onAdd?.(value);
      } else if (event === 'change') {
        events.onChange?.(value);
      } else if (event === 'unlink') {
        events.onUnlink?.(value);
      } else {
        // 'error': the scripted value becomes the error message.
        events.onError?.(new Error(value));
      }
    }
    return close ? () => close() : () => Promise.resolve();
  };
/**
 * Builds a fully mocked StorageRepository.
 *
 * Also resets StorageCore and pins the media location to /data so path
 * helpers behave deterministically across tests.
 */
export const newStorageRepositoryMock = (): Mocked<RepositoryInterface<StorageRepository>> => {
  StorageCore.reset();
  StorageCore.setMediaLocation('/data');
  return {
    createZipStream: vitest.fn(),
    createReadStream: vitest.fn(),
    createPlainReadStream: vitest.fn(),
    createGzip: vitest.fn(),
    createGunzip: vitest.fn(),
    readFile: vitest.fn(),
    readTextFile: vitest.fn(),
    createFile: vitest.fn(),
    createWriteStream: vitest.fn(),
    createOrOverwriteFile: vitest.fn(),
    existsSync: vitest.fn(),
    overwriteFile: vitest.fn(),
    unlink: vitest.fn(),
    unlinkDir: vitest.fn().mockResolvedValue(true),
    removeEmptyDirs: vitest.fn(),
    checkFileExists: vitest.fn(),
    mkdirSync: vitest.fn(),
    checkDiskUsage: vitest.fn(),
    readdir: vitest.fn(),
    realpath: vitest.fn().mockImplementation((filepath: string) => Promise.resolve(filepath)), // identity: no symlink resolution in tests
    stat: vitest.fn(),
    crawl: vitest.fn(),
    walk: vitest.fn().mockImplementation(async function* () {}), // empty async generator: yields nothing
    rename: vitest.fn(),
    copyFile: vitest.fn(),
    utimes: vitest.fn(),
    watch: vitest.fn().mockImplementation(makeMockWatcher({})), // ready-only watcher with no scripted events
  };
};

View file

@ -0,0 +1,14 @@
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { RepositoryInterface } from 'src/types';
import { clearConfigCache } from 'src/utils/config';
import { Mocked, vitest } from 'vitest';
/**
 * Builds a mocked SystemMetadataRepository, clearing the shared config cache
 * first — presumably so system-config state from a previous test cannot leak
 * through cached reads (see src/utils/config).
 *
 * NOTE(review): `get` is cast to `any`; its generic/overloaded signature
 * apparently doesn't match a bare vitest.fn() — confirm before tightening.
 */
export const newSystemMetadataRepositoryMock = (): Mocked<RepositoryInterface<SystemMetadataRepository>> => {
  clearConfigCache();
  return {
    get: vitest.fn() as any,
    set: vitest.fn(),
    delete: vitest.fn(),
    readFile: vitest.fn(),
  };
};

View file

@ -0,0 +1,28 @@
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
import { RepositoryInterface } from 'src/types';
import { Mocked, vitest } from 'vitest';
/** A single metric group stub: counter/gauge/histogram/configure all no-op spies. */
const newMetricGroupMock = () => ({
  addToCounter: vitest.fn(),
  addToGauge: vitest.fn(),
  addToHistogram: vitest.fn(),
  configure: vitest.fn(),
});
// Mirror TelemetryRepository's surface, wrapping each metric group so its
// individual methods are vitest mocks.
type ITelemetryRepository = RepositoryInterface<TelemetryRepository>;
export type ITelemetryRepositoryMock = {
  [K in keyof ITelemetryRepository]: Mocked<RepositoryInterface<ITelemetryRepository[K]>>;
};
/** Telemetry mock: `setup` is a spy; each group gets an independent metric stub. */
export const newTelemetryRepositoryMock = (): ITelemetryRepositoryMock => ({
  setup: vitest.fn(),
  api: newMetricGroupMock(),
  host: newMetricGroupMock(),
  jobs: newMetricGroupMock(),
  repo: newMetricGroupMock(),
});

View file

@ -0,0 +1,543 @@
import {
Activity,
ApiKey,
AssetFace,
AssetFile,
AuthApiKey,
AuthSharedLink,
AuthUser,
Exif,
Library,
Memory,
Partner,
Person,
Session,
Stack,
Tag,
User,
UserAdmin,
} from 'src/database';
import { MapAsset } from 'src/dtos/asset-response.dto';
import { AuthDto } from 'src/dtos/auth.dto';
import { AssetEditAction, AssetEditActionItem, MirrorAxis } from 'src/dtos/editing.dto';
import { QueueStatisticsDto } from 'src/dtos/queue.dto';
import {
AssetFileType,
AssetStatus,
AssetType,
AssetVisibility,
MemoryType,
Permission,
SourceType,
UserMetadataKey,
UserStatus,
} from 'src/enum';
import { DeepPartial, OnThisDayData, UserMetadataItem } from 'src/types';
import { v4, v7 } from 'uuid';
/** Fresh random v4 UUID. */
export const newUuid = () => v4();
/** 100 fresh v4 UUIDs. Uses Array.from's map callback directly — the previous `.fill(0).map(...)` round-trip was redundant. */
export const newUuids = () => Array.from({ length: 100 }, () => newUuid());
/** Current timestamp. */
export const newDate = () => new Date();
/** Fresh v7 (time-ordered) UUID. */
export const newUuidV7 = () => v7();
/** Deterministic fake SHA-1 digest buffer. */
export const newSha1 = () => Buffer.from('this is a fake hash');
/** Random 512-dimension embedding in pgvector text form: "[x,y,...]". */
export const newEmbedding = () => {
  const embedding = Array.from({ length: 512 }, () => Math.random());
  // Explicit join(',') — identical output to the old implicit array
  // stringification, but no longer relies on Array#toString.
  return `[${embedding.join(',')}]`;
};
/**
 * Builds an AuthDto. The user is always present (defaults applied via
 * userAdminFactory); apiKey/session/sharedLink are attached only when their
 * option is provided. A shared link is always re-bound to the auth user's id.
 */
const authFactory = ({
  apiKey,
  session,
  sharedLink,
  user,
}: {
  apiKey?: Partial<AuthApiKey>;
  session?: { id: string };
  user?: Omit<
    Partial<UserAdmin>,
    'createdAt' | 'updatedAt' | 'deletedAt' | 'fileCreatedAt' | 'fileModifiedAt' | 'localDateTime' | 'profileChangedAt'
  >;
  sharedLink?: Partial<AuthSharedLink>;
} = {}) => {
  const auth: AuthDto = {
    user: authUserFactory(userAdminFactory(user ?? {})),
  };
  const userId = auth.user.id;
  if (apiKey) {
    auth.apiKey = authApiKeyFactory(apiKey);
  }
  if (session) {
    auth.session = {
      id: session.id,
      hasElevatedPermission: false,
    };
  }
  if (sharedLink) {
    // Force the link's userId to match the auth user, overriding any passed value.
    auth.sharedLink = authSharedLinkFactory({ ...sharedLink, userId });
  }
  return auth;
};
/**
 * Auth-facing shared link defaults: viewable (exif + download), no upload,
 * no password, never expires. Only the listed fields are returned — extra
 * properties on the partial are dropped.
 */
const authSharedLinkFactory = (sharedLink: Partial<AuthSharedLink> = {}) => {
  const {
    id = newUuid(),
    expiresAt = null,
    userId = newUuid(),
    showExif = true,
    allowUpload = false,
    allowDownload = true,
    password = null,
  } = sharedLink;
  return { id, expiresAt, userId, showExif, allowUpload, allowDownload, password };
};
// API key with full permissions unless overridden.
const authApiKeyFactory = (apiKey: Partial<AuthApiKey> = {}) => ({
  id: newUuid(),
  permissions: [Permission.All],
  ...apiKey,
});
// Minimal auth-user projection: only the fields returned below are kept.
const authUserFactory = (authUser: Partial<AuthUser> = {}) => {
  const {
    id = newUuid(),
    isAdmin = false,
    name = 'Test User',
    email = 'test@immich.cloud',
    quotaUsageInBytes = 0,
    quotaSizeInBytes = null,
  } = authUser;
  return { id, isAdmin, name, email, quotaUsageInBytes, quotaSizeInBytes };
};
/**
 * Partner pair: both sides get full user records, and the id fields are
 * derived from those users (callers may still override via the spread).
 */
const partnerFactory = (partner: Partial<Partner> = {}) => {
  const sharedBy = userFactory(partner.sharedBy || {});
  const sharedWith = userFactory(partner.sharedWith || {});
  return {
    sharedById: sharedBy.id,
    sharedBy,
    sharedWithId: sharedWith.id,
    sharedWith,
    createId: newUuidV7(),
    createdAt: newDate(),
    updatedAt: newDate(),
    updateId: newUuidV7(),
    inTimeline: true,
    ...partner,
  };
};
/**
 * Builds a Session row with mobile-client defaults; any provided fields
 * override the defaults.
 *
 * Fix: `appVersion` was normalized BEFORE `...session`, so the spread
 * re-applied whatever the caller passed — including an explicit `undefined`,
 * which then leaked through instead of becoming null. Normalizing after the
 * spread keeps explicit values while mapping absent/undefined to null.
 */
const sessionFactory = (session: Partial<Session> = {}) => ({
  id: newUuid(),
  createdAt: newDate(),
  updatedAt: newDate(),
  updateId: newUuidV7(),
  deviceOS: 'android',
  deviceType: 'mobile',
  token: 'abc123',
  parentId: null,
  expiresAt: null,
  userId: newUuid(),
  pinExpiresAt: newDate(),
  isPendingSyncReset: false,
  ...session,
  appVersion: session.appVersion ?? null,
});
// Queue statistics with every counter zeroed; override individual counts as needed.
const queueStatisticsFactory = (dto?: Partial<QueueStatisticsDto>) => ({
  active: 0,
  completed: 0,
  failed: 0,
  delayed: 0,
  waiting: 0,
  paused: 0,
  ...dto,
});
/**
 * Builds a Stack with an owner and materialized asset list.
 *
 * The primary asset defaults to the first provided asset's id; when no assets
 * are given — or the list is explicitly empty — a random id is used instead.
 */
const stackFactory = ({ owner, assets, ...stack }: DeepPartial<Stack> = {}): Stack => {
  const ownerId = newUuid();
  return {
    id: newUuid(),
    // `?.[0]?.id`: the extra optional chain guards against an empty assets
    // array, which previously threw (reading `.id` of undefined).
    primaryAssetId: assets?.[0]?.id ?? newUuid(),
    ownerId,
    owner: userFactory(owner ?? { id: ownerId }),
    assets: assets?.map((asset) => assetFactory(asset)) ?? [],
    ...stack,
  };
};
// Basic user with onboarding already completed.
const userFactory = (user: Partial<User> = {}) => ({
  id: newUuid(),
  name: 'Test User',
  email: 'test@immich.cloud',
  avatarColor: null,
  profileImagePath: '',
  profileChangedAt: newDate(),
  metadata: [
    {
      key: UserMetadataKey.Onboarding,
      value: 'true',
    },
  ] as UserMetadataItem[],
  ...user,
});
/**
 * Full admin-view user record. Destructuring defaults mean only fields that
 * are absent or `undefined` in the partial are defaulted; only the listed
 * fields are returned.
 */
const userAdminFactory = (user: Partial<UserAdmin> = {}) => {
  const {
    id = newUuid(),
    name = 'Test User',
    email = 'test@immich.cloud',
    profileImagePath = '',
    profileChangedAt = newDate(),
    storageLabel = null,
    shouldChangePassword = false,
    isAdmin = false,
    avatarColor = null,
    createdAt = newDate(),
    updatedAt = newDate(),
    deletedAt = null,
    oauthId = '',
    quotaSizeInBytes = null,
    quotaUsageInBytes = 0,
    status = UserStatus.Active,
    metadata = [],
  } = user;
  return {
    id,
    name,
    email,
    profileImagePath,
    profileChangedAt,
    storageLabel,
    shouldChangePassword,
    isAdmin,
    avatarColor,
    createdAt,
    updatedAt,
    deletedAt,
    oauthId,
    quotaSizeInBytes,
    quotaUsageInBytes,
    status,
  metadata,
  };
};
/**
 * Active timeline image asset with no relations. Relation-shaped fields
 * (exifInfo, owner, stack, tags, faces, files, edits) are excluded from the
 * partial's type and must be attached by the caller.
 */
const assetFactory = (
  asset: Omit<DeepPartial<MapAsset>, 'exifInfo' | 'owner' | 'stack' | 'tags' | 'faces' | 'files' | 'edits'> = {},
) => {
  return {
    id: newUuid(),
    createdAt: newDate(),
    updatedAt: newDate(),
    deletedAt: null,
    updateId: newUuidV7(),
    status: AssetStatus.Active,
    checksum: newSha1(),
    deviceAssetId: '',
    deviceId: '',
    duplicateId: null,
    duration: null,
    encodedVideoPath: null,
    fileCreatedAt: newDate(),
    fileModifiedAt: newDate(),
    isExternal: false,
    isFavorite: false,
    isOffline: false,
    libraryId: null,
    livePhotoVideoId: null,
    localDateTime: newDate(),
    originalFileName: 'IMG_123.jpg',
    originalPath: `/data/12/34/IMG_123.jpg`,
    ownerId: newUuid(),
    stackId: null,
    thumbhash: null,
    type: AssetType.Image,
    visibility: AssetVisibility.Timeline,
    width: null,
    height: null,
    isEdited: false,
    ...asset,
  };
};
// Activity entry whose embedded user record shares the activity's userId.
const activityFactory = (activity: Partial<Activity> = {}) => {
  const userId = activity.userId || newUuid();
  return {
    id: newUuid(),
    comment: null,
    isLiked: false,
    userId,
    user: userFactory({ id: userId }),
    assetId: newUuid(),
    albumId: newUuid(),
    createdAt: newDate(),
    updatedAt: newDate(),
    updateId: newUuidV7(),
    ...activity,
  };
};
// API key row with full permissions.
const apiKeyFactory = (apiKey: Partial<ApiKey> = {}) => ({
  id: newUuid(),
  userId: newUuid(),
  createdAt: newDate(),
  updatedAt: newDate(),
  updateId: newUuidV7(),
  name: 'Api Key',
  permissions: [Permission.All],
  ...apiKey,
});
// External library with no assets, import paths, or exclusion patterns.
const libraryFactory = (library: Partial<Library> = {}) => ({
  id: newUuid(),
  createdAt: newDate(),
  updatedAt: newDate(),
  updateId: newUuidV7(),
  deletedAt: null,
  refreshedAt: null,
  name: 'Library',
  assets: [],
  ownerId: newUuid(),
  importPaths: [],
  exclusionPatterns: [],
  ...library,
});
// Unsaved, unseen "on this day" memory with no assets.
const memoryFactory = (memory: Partial<Memory> = {}) => ({
  id: newUuid(),
  createdAt: newDate(),
  updatedAt: newDate(),
  updateId: newUuidV7(),
  deletedAt: null,
  ownerId: newUuid(),
  type: MemoryType.OnThisDay,
  data: { year: 2024 } as OnThisDayData,
  isSaved: false,
  memoryAt: newDate(),
  seenAt: null,
  showAt: newDate(),
  hideAt: newDate(),
  assets: [],
  ...memory,
});
// Single version-history row; takes no overrides.
const versionHistoryFactory = () => ({
  id: newUuid(),
  createdAt: newDate(),
  version: '1.123.45',
});
/**
 * Payload shape for sidecar-write jobs: an asset with one XMP sidecar file
 * and enough exif to exercise the writer. Takes no overrides.
 */
const assetSidecarWriteFactory = () => {
  const id = newUuid();
  return {
    id,
    originalPath: '/path/to/original-path.jpg.xmp',
    tags: [],
    files: [
      {
        id: newUuid(),
        path: '/path/to/original-path.jpg.xmp',
        type: AssetFileType.Sidecar,
        isEdited: false,
      },
    ],
    exifInfo: {
      assetId: id,
      description: 'this is a description',
      latitude: 12,
      longitude: 12,
      dateTimeOriginal: '2023-11-22T04:56:12.196Z',
    } as unknown as Exif,
  };
};
/**
 * OCR detection box. Corner coordinates (x1..y4) are normalized fractions of
 * the image and default to an axis-aligned rectangle.
 */
const assetOcrFactory = (
  ocr: {
    id?: string;
    assetId?: string;
    x1?: number;
    y1?: number;
    x2?: number;
    y2?: number;
    x3?: number;
    y3?: number;
    x4?: number;
    y4?: number;
    boxScore?: number;
    textScore?: number;
    text?: string;
    isVisible?: boolean;
  } = {},
) => ({
  id: newUuid(),
  assetId: newUuid(),
  x1: 0.1,
  y1: 0.2,
  x2: 0.3,
  y2: 0.2,
  x3: 0.3,
  y3: 0.4,
  x4: 0.1,
  y4: 0.4,
  boxScore: 0.95,
  textScore: 0.92,
  text: 'Sample Text',
  isVisible: true,
  ...ocr,
});
// Preview-type asset file record.
const assetFileFactory = (file: Partial<AssetFile> = {}) => ({
  id: newUuid(),
  type: AssetFileType.Preview,
  path: '/uploads/user-id/thumbs/path.jpg',
  isEdited: false,
  isProgressive: false,
  ...file,
});
// Exif row resembling a Pixel 7 photo taken in Austin, TX.
const exifFactory = (exif: Partial<Exif> = {}) => ({
  assetId: newUuid(),
  autoStackId: null,
  bitsPerSample: null,
  city: 'Austin',
  colorspace: null,
  country: 'United States of America',
  dateTimeOriginal: newDate(),
  description: '',
  exifImageHeight: 420,
  exifImageWidth: 42,
  exposureTime: null,
  fileSizeInByte: 69,
  fNumber: 1.7,
  focalLength: 4.38,
  fps: null,
  iso: 947,
  latitude: 30.267_334_570_570_195,
  longitude: -97.789_833_534_282_07,
  lensModel: null,
  livePhotoCID: null,
  make: 'Google',
  model: 'Pixel 7',
  modifyDate: newDate(),
  orientation: '1',
  profileDescription: null,
  projectionType: null,
  rating: 4,
  state: 'Texas',
  tags: ['parent/child'],
  timeZone: 'UTC-6',
  ...exif,
});
/**
 * Root-level Tag with a unique value.
 *
 * The partial now defaults to {} for consistency with every other factory in
 * this file, so `tagFactory()` works without arguments (existing callers that
 * pass a partial are unaffected).
 */
const tagFactory = (tag: Partial<Tag> = {}): Tag => ({
  id: newUuid(),
  color: null,
  createdAt: newDate(),
  parentId: null,
  updatedAt: newDate(),
  value: `tag-${newUuid()}`,
  ...tag,
});
/**
 * ML-detected, unassigned face. Passing `person: null` keeps the face
 * person-less; any other (or absent) value is expanded via personFactory.
 */
const faceFactory = ({ person, ...face }: DeepPartial<AssetFace> = {}): AssetFace => ({
  assetId: newUuid(),
  boundingBoxX1: 1,
  boundingBoxX2: 2,
  boundingBoxY1: 1,
  boundingBoxY2: 2,
  deletedAt: null,
  id: newUuid(),
  imageHeight: 420,
  imageWidth: 42,
  isVisible: true,
  personId: null,
  sourceType: SourceType.MachineLearning,
  updatedAt: newDate(),
  updateId: newUuidV7(),
  person: person === null ? null : personFactory(person),
  ...face,
});
/**
 * Edit action with per-action default parameters; the trailing spread lets a
 * caller replace `parameters` wholesale. With no action specified, a vertical
 * mirror is returned.
 */
const assetEditFactory = (edit?: Partial<AssetEditActionItem>): AssetEditActionItem => {
  switch (edit?.action) {
    case AssetEditAction.Crop: {
      return { action: AssetEditAction.Crop, parameters: { height: 42, width: 42, x: 0, y: 10 }, ...edit };
    }
    case AssetEditAction.Mirror: {
      return { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal }, ...edit };
    }
    case AssetEditAction.Rotate: {
      return { action: AssetEditAction.Rotate, parameters: { angle: 90 }, ...edit };
    }
    default: {
      return { action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Vertical } };
    }
  }
};
// Visible, non-favorite person with a thumbnail.
const personFactory = (person?: Partial<Person>): Person => ({
  birthDate: newDate(),
  color: null,
  createdAt: newDate(),
  faceAssetId: null,
  id: newUuid(),
  isFavorite: false,
  isHidden: false,
  name: 'person',
  ownerId: newUuid(),
  thumbnailPath: '/path/to/person/thumbnail.jpg',
  updatedAt: newDate(),
  updateId: newUuidV7(),
  ...person,
});
// Central registry: the single import surface tests use to build fixtures.
export const factory = {
  activity: activityFactory,
  apiKey: apiKeyFactory,
  asset: assetFactory,
  assetFile: assetFileFactory,
  assetOcr: assetOcrFactory,
  auth: authFactory,
  authApiKey: authApiKeyFactory,
  authUser: authUserFactory,
  library: libraryFactory,
  memory: memoryFactory,
  partner: partnerFactory,
  queueStatistics: queueStatisticsFactory,
  session: sessionFactory,
  stack: stackFactory,
  user: userFactory,
  userAdmin: userAdminFactory,
  versionHistory: versionHistoryFactory,
  jobAssets: {
    sidecarWrite: assetSidecarWriteFactory,
  },
  exif: exifFactory,
  face: faceFactory,
  person: personFactory,
  assetEdit: assetEditFactory,
  tag: tagFactory,
  uuid: newUuid,
  date: newDate,
  responses: {
    // Standard Nest 400 body; `message` defaults to a loose matcher.
    badRequest: (message: any = null) => ({
      error: 'Bad Request',
      statusCode: 400,
      message: message ?? expect.anything(),
    }),
  },
};

View file

@ -0,0 +1,48 @@
import { Check, Column, ConstraintType, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: the decorated model is the input and `schema` is the
// expected extraction result (here: an auto-named CHECK constraint).
@Table()
@Check({ expression: '1=1' })
export class Table1 {
  @Column({ type: 'uuid' })
  id!: string;
}
export const description = 'should create a check constraint with a default name';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'id',
          tableName: 'table1',
          type: 'uuid',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [
        {
          type: ConstraintType.CHECK,
          // Deterministic hash-derived name generated when none is supplied.
          name: 'CHK_8d2ecfd49b984941f6b2589799',
          tableName: 'table1',
          expression: '1=1',
          synchronize: true,
        },
      ],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,48 @@
import { Check, Column, ConstraintType, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: explicit CHECK constraint name must be used verbatim.
@Table()
@Check({ name: 'CHK_test', expression: '1=1' })
export class Table1 {
  @Column({ type: 'uuid' })
  id!: string;
}
export const description = 'should create a check constraint with a specific name';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'id',
          tableName: 'table1',
          type: 'uuid',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [
        {
          type: ConstraintType.CHECK,
          name: 'CHK_test',
          tableName: 'table1',
          expression: '1=1',
          synchronize: true,
        },
      ],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,40 @@
import { CreateDateColumn, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: @CreateDateColumn maps to timestamptz NOT NULL DEFAULT now().
@Table()
export class Table1 {
  @CreateDateColumn()
  createdAt!: string;
}
export const description = 'should register a table with an created at date column';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'createdAt',
          tableName: 'table1',
          type: 'timestamp with time zone',
          default: 'now()',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,40 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: an empty-array default serializes to the Postgres literal '{}'.
@Table()
export class Table1 {
  @Column({ type: 'character varying', array: true, default: [] })
  column1!: string[];
}
export const description = 'should register a table with a column with a default value (array)';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'character varying',
          nullable: false,
          isArray: true,
          primary: false,
          synchronize: true,
          default: "'{}'",
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,40 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: a boolean default serializes to the unquoted literal 'true'.
@Table()
export class Table1 {
  @Column({ type: 'boolean', default: true })
  column1!: boolean;
}
export const description = 'should register a table with a column with a default value (boolean)';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'boolean',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
          default: 'true',
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,42 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: a Date default serializes to a quoted ISO-8601 string.
// NOTE(review): new Date(2023, 0, 1) is local time, so the expected UTC ISO
// string below presumably relies on the test environment running in UTC.
const date = new Date(2023, 0, 1);
@Table()
export class Table1 {
  @Column({ type: 'character varying', default: date })
  column1!: string;
}
export const description = 'should register a table with a column with a default value (date)';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'character varying',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
          default: "'2023-01-01T00:00:00.000Z'",
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,40 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: a function-valued default is emitted verbatim (unquoted).
@Table()
export class Table1 {
  @Column({ type: 'character varying', default: () => 'now()' })
  column1!: string;
}
export const description = 'should register a table with a column with a default function';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'character varying',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
          default: 'now()',
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,39 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: `default: null` implies a nullable column with no DEFAULT clause.
@Table()
export class Table1 {
  @Column({ type: 'character varying', default: null })
  column1!: string;
}
export const description = 'should register a nullable column from a default of null';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'character varying',
          nullable: true,
          isArray: false,
          primary: false,
          synchronize: true,
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,40 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// sql-tools fixture: a numeric default serializes to the unquoted literal '0'.
// NOTE(review): the property is declared `string` for an integer column —
// apparently irrelevant to schema extraction, but worth confirming.
@Table()
export class Table1 {
  @Column({ type: 'integer', default: 0 })
  column1!: string;
}
export const description = 'should register a table with a column with a default value (number)';
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'integer',
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
          default: '0',
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,40 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a varchar column with a string default.
@Table()
export class Table1 {
@Column({ type: 'character varying', default: 'foo' })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with a column with a default value (string)';
// Expected schema: the string default is single-quoted for SQL ("'foo'").
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'character varying',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
default: "'foo'",
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,39 @@
import { DatabaseSchema, DeleteDateColumn, Table } from 'src/sql-tools';
// Fixture model: a soft-delete timestamp declared via @DeleteDateColumn.
@Table()
export class Table1 {
@DeleteDateColumn()
deletedAt!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with a deleted at date column';
// Expected schema: nullable 'timestamp with time zone' (unset means not deleted).
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'deletedAt',
tableName: 'table1',
type: 'timestamp with time zone',
nullable: true,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,53 @@
import { Column, DatabaseSchema, registerEnum, Table } from 'src/sql-tools';
// TS enum whose values seed the registered database enum below.
enum Test {
Foo = 'foo',
Bar = 'bar',
}
// Register the enum with the schema tooling so @Column can reference it.
const test_enum = registerEnum({ name: 'test_enum', values: Object.values(Test) });
// Fixture model: a column typed by the registered enum.
@Table()
export class Table1 {
@Column({ enum: test_enum })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should accept an enum type';
// Expected schema: the enum appears in `enums` and the column is type 'enum'
// with enumName linking back to it.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [
{
name: 'test_enum',
values: ['foo', 'bar'],
synchronize: true,
},
],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'enum',
enumName: 'test_enum',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,48 @@
import { ConstraintType, DatabaseSchema, PrimaryGeneratedColumn, Table } from 'src/sql-tools';
// Fixture model: a primary key using the 'identity' generation strategy.
@Table()
export class Table1 {
@PrimaryGeneratedColumn({ strategy: 'identity' })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with a generated identity column';
// Expected schema: integer column with identity: true plus a generated
// PRIMARY KEY constraint (hash-derived name).
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'integer',
identity: true,
nullable: false,
isArray: false,
primary: true,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [
{
type: ConstraintType.PRIMARY_KEY,
name: 'PK_50c4f9905061b1e506d38a2a380',
tableName: 'table1',
columnNames: ['column1'],
synchronize: true,
},
],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,48 @@
import { ConstraintType, DatabaseSchema, PrimaryGeneratedColumn, Table } from 'src/sql-tools';
// Fixture model: a primary key using the 'uuid' generation strategy.
@Table()
export class Table1 {
@PrimaryGeneratedColumn({ strategy: 'uuid' })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with a primary generated uuid column';
// Expected schema: uuid column defaulting to uuid_generate_v4() plus a
// generated PRIMARY KEY constraint (hash-derived name).
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'uuid',
default: 'uuid_generate_v4()',
nullable: false,
isArray: false,
primary: true,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [
{
type: ConstraintType.PRIMARY_KEY,
name: 'PK_50c4f9905061b1e506d38a2a380',
tableName: 'table1',
columnNames: ['column1'],
synchronize: true,
},
],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,47 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a column flagged with index: true (auto-named index).
@Table()
export class Table1 {
@Column({ index: true })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should create a column with an index';
// Expected schema: a non-unique index with a hash-derived IDX_ name.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'character varying',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [
{
name: 'IDX_50c4f9905061b1e506d38a2a38',
columnNames: ['column1'],
tableName: 'table1',
unique: false,
synchronize: true,
},
],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,47 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a column with an explicitly named index.
@Table()
export class Table1 {
@Column({ indexName: 'IDX_test' })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should create a column with an index if a name is provided';
// Expected schema: the index uses the provided name instead of a hash-derived one.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'character varying',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [
{
name: 'IDX_test',
columnNames: ['column1'],
tableName: 'table1',
unique: false,
synchronize: true,
},
],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,39 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: no explicit type, default of null (nullability inferred).
@Table()
export class Table1 {
@Column({ default: null })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should infer nullable from the default value';
// Expected schema: nullable: true inferred from the null default; no default emitted.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'character varying',
nullable: true,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,39 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a bare @Column() — the column name comes from the property.
@Table()
export class Table1 {
@Column()
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with a column with a default name';
// Expected schema: column named after the class property, 'character varying' by default.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'character varying',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,39 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: column name overridden via the options object.
@Table()
export class Table1 {
@Column({ name: 'column-1' })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with a column with a specific name';
// Expected schema: the column uses 'column-1', not the property name.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column-1',
tableName: 'table1',
type: 'character varying',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,39 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: column name given as a positional string argument
// (shorthand for the { name: ... } options form).
@Table()
export class Table1 {
@Column('column-1')
column1!: string;
}
// Test title consumed by the fixture runner.
// NOTE(review): this description duplicates the options-object fixture's title;
// consider suffixing '(string argument)' to disambiguate runner output.
export const description = 'should register a table with a column with a specific name';
// Expected schema: identical to the options-object form — column named 'column-1'.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column-1',
tableName: 'table1',
type: 'character varying',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,39 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: nullability set explicitly on the column options.
@Table()
export class Table1 {
@Column({ nullable: true })
column1!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should set nullable correctly';
// Expected schema: nullable: true carried through from the options.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'column1',
tableName: 'table1',
type: 'character varying',
nullable: true,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,40 @@
import { Column, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a varchar column with an explicit fixed length.
@Table()
export class Table1 {
  @Column({ length: 2 })
  column1!: string;
}

// Test title consumed by the fixture runner.
// (Wording fixed: previously read 'should use create a string column...'.)
export const description = 'should create a string column with a fixed length';

// Expected schema: 'character varying' with the length option carried through.
export const schema: DatabaseSchema = {
  databaseName: 'postgres',
  schemaName: 'public',
  functions: [],
  enums: [],
  extensions: [],
  parameters: [],
  overrides: [],
  tables: [
    {
      name: 'table1',
      columns: [
        {
          name: 'column1',
          tableName: 'table1',
          type: 'character varying',
          length: 2,
          nullable: false,
          isArray: false,
          primary: false,
          synchronize: true,
        },
      ],
      indexes: [],
      triggers: [],
      constraints: [],
      synchronize: true,
    },
  ],
  warnings: [],
};

View file

@ -0,0 +1,47 @@
import { Column, ConstraintType, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a unique column without an explicit constraint name.
@Table()
export class Table1 {
@Column({ type: 'uuid', unique: true })
id!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should create a unique key constraint with a default name';
// Expected schema: a UNIQUE constraint with a hash-derived UQ_ name.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'id',
tableName: 'table1',
type: 'uuid',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [
{
type: ConstraintType.UNIQUE,
name: 'UQ_b249cc64cf63b8a22557cdc8537',
tableName: 'table1',
columnNames: ['id'],
synchronize: true,
},
],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,47 @@
import { Column, ConstraintType, DatabaseSchema, Table } from 'src/sql-tools';
// Fixture model: a unique column with an explicit constraint name.
@Table()
export class Table1 {
@Column({ type: 'uuid', unique: true, uniqueConstraintName: 'UQ_test' })
id!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should create a unique key constraint with a specific name';
// Expected schema: the UNIQUE constraint uses the provided name 'UQ_test'.
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'id',
tableName: 'table1',
type: 'uuid',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [
{
type: ConstraintType.UNIQUE,
name: 'UQ_test',
tableName: 'table1',
columnNames: ['id'],
synchronize: true,
},
],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,40 @@
import { DatabaseSchema, Table, UpdateDateColumn } from 'src/sql-tools';
// Fixture model: an update timestamp declared via @UpdateDateColumn.
@Table()
export class Table1 {
@UpdateDateColumn()
updatedAt!: string;
}
// Test title consumed by the fixture runner.
export const description = 'should register a table with an updated at date column';
// Expected schema: non-nullable 'timestamp with time zone' defaulting to now().
export const schema: DatabaseSchema = {
databaseName: 'postgres',
schemaName: 'public',
functions: [],
enums: [],
extensions: [],
parameters: [],
overrides: [],
tables: [
{
name: 'table1',
columns: [
{
name: 'updatedAt',
tableName: 'table1',
type: 'timestamp with time zone',
default: 'now()',
nullable: false,
isArray: false,
primary: false,
synchronize: true,
},
],
indexes: [],
triggers: [],
constraints: [],
synchronize: true,
},
],
warnings: [],
};

View file

@ -0,0 +1,7 @@
import { Table } from 'src/sql-tools';
// Error fixture: the same class decorated with @Table twice, which the
// schema processor is expected to reject.
@Table({ name: 'table-1' })
@Table({ name: 'table-2' })
export class Table1 {}
// Expected error message emitted by the processor.
// NOTE(review): decorators apply bottom-up, so 'table-2' is processed first —
// confirm against the processor which registration the message names.
export const message = 'Table table-2 has already been registered';

Some files were not shown because too many files have changed in this diff Show more