Source Code added
This commit is contained in:
parent
800376eafd
commit
9efa9bc6dd
3912 changed files with 754770 additions and 2 deletions
261
server/test/medium/specs/services/asset-media.service.spec.ts
Normal file
261
server/test/medium/specs/services/asset-media.service.spec.ts
Normal file
|
|
@ -0,0 +1,261 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetMediaStatus } from 'src/dtos/asset-media-response.dto';
|
||||
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
|
||||
import { AssetFileType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AssetMediaService } from 'src/services/asset-media.service';
|
||||
import { AssetService } from 'src/services/asset.service';
|
||||
import { ImmichFileResponse } from 'src/utils/file';
|
||||
import { mediumFactory, newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AssetMediaService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AccessRepository, AssetRepository, UserRepository],
|
||||
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AssetService.name, () => {
|
||||
describe('uploadAsset', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const file = mediumFactory.uploadFile();
|
||||
|
||||
await expect(
|
||||
sut.uploadAsset(
|
||||
auth,
|
||||
{
|
||||
deviceId: 'some-id',
|
||||
deviceAssetId: 'some-id',
|
||||
fileModifiedAt: new Date(),
|
||||
fileCreatedAt: new Date(),
|
||||
assetData: Buffer.from('some data'),
|
||||
},
|
||||
file,
|
||||
),
|
||||
).resolves.toEqual({
|
||||
id: expect.any(String),
|
||||
status: AssetMediaStatus.CREATED,
|
||||
});
|
||||
|
||||
expect(ctx.getMock(EventRepository).emit).toHaveBeenCalledWith('AssetCreate', {
|
||||
asset: expect.objectContaining({ deviceAssetId: 'some-id' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with an empty metadata list', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const file = mediumFactory.uploadFile();
|
||||
|
||||
await expect(
|
||||
sut.uploadAsset(
|
||||
auth,
|
||||
{
|
||||
deviceId: 'some-id',
|
||||
deviceAssetId: 'some-id',
|
||||
fileModifiedAt: new Date(),
|
||||
fileCreatedAt: new Date(),
|
||||
assetData: Buffer.from('some data'),
|
||||
metadata: [],
|
||||
},
|
||||
file,
|
||||
),
|
||||
).resolves.toEqual({
|
||||
id: expect.any(String),
|
||||
status: AssetMediaStatus.CREATED,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('viewThumbnail', () => {
|
||||
it('should return original thumbnail by default when both exist', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/edited/preview.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return edited thumbnail when edited=true', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/edited/preview.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: true });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/edited/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return original thumbnail when edited=false', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/edited/preview.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: false });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return original thumbnail when only original exists and edited=false', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create only original thumbnail
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: false });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return original thumbnail when only original exists and edited=true', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create only original thumbnail
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: true });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should work with thumbnail size', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Thumbnail,
|
||||
path: '/original/thumbnail.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Thumbnail,
|
||||
path: '/edited/thumbnail.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
// Test default (should get original)
|
||||
const resultDefault = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.THUMBNAIL });
|
||||
expect(resultDefault).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((resultDefault as ImmichFileResponse).path).toBe('/original/thumbnail.jpg');
|
||||
|
||||
// Test edited=true (should get edited)
|
||||
const resultEdited = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.THUMBNAIL, edited: true });
|
||||
expect(resultEdited).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((resultEdited as ImmichFileResponse).path).toBe('/edited/thumbnail.jpg');
|
||||
});
|
||||
});
|
||||
});
|
||||
606
server/test/medium/specs/services/asset.service.spec.ts
Normal file
606
server/test/medium/specs/services/asset.service.spec.ts
Normal file
|
|
@ -0,0 +1,606 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetFileType, AssetMetadataKey, JobName, SharedLinkType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AlbumRepository } from 'src/repositories/album.repository';
|
||||
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
|
||||
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
|
||||
import { StackRepository } from 'src/repositories/stack.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AssetService } from 'src/services/asset.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AssetService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AssetRepository,
|
||||
AssetJobRepository,
|
||||
AlbumRepository,
|
||||
AccessRepository,
|
||||
SharedLinkAssetRepository,
|
||||
StackRepository,
|
||||
UserRepository,
|
||||
],
|
||||
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AssetService.name, () => {
|
||||
describe('getStatistics', () => {
|
||||
it('should return stats as numbers, not strings', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('copy', () => {
|
||||
it('should copy albums', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const albumRepo = ctx.get(AlbumRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const { album } = await ctx.newAlbum({ ownerId: user.id });
|
||||
await ctx.newAlbumAsset({ albumId: album.id, assetId: oldAsset.id });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
await expect(albumRepo.getAssetIds(album.id, [oldAsset.id, newAsset.id])).resolves.toEqual(
|
||||
new Set([oldAsset.id, newAsset.id]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should copy shared links', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const sharedLinkRepo = ctx.get(SharedLinkRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const { id: sharedLinkId } = await sharedLinkRepo.create({
|
||||
allowUpload: false,
|
||||
key: Buffer.from('123'),
|
||||
type: SharedLinkType.Individual,
|
||||
userId: user.id,
|
||||
assetIds: [oldAsset.id],
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
await expect(sharedLinkRepo.get(user.id, sharedLinkId)).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
assets: [expect.objectContaining({ id: oldAsset.id }), expect.objectContaining({ id: newAsset.id })],
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should merge stacks', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
await ctx.newExif({ assetId: asset2.id, description: 'foo' });
|
||||
|
||||
await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
|
||||
|
||||
const {
|
||||
stack: { id: newStackId },
|
||||
} = await ctx.newStack({ ownerId: user.id }, [newAsset.id, asset2.id]);
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
await expect(stackRepo.getById(oldAsset.id)).resolves.toEqual(undefined);
|
||||
|
||||
const newStack = await stackRepo.getById(newStackId);
|
||||
expect(newStack).toEqual(
|
||||
expect.objectContaining({
|
||||
primaryAssetId: newAsset.id,
|
||||
assets: expect.arrayContaining([expect.objectContaining({ id: asset2.id })]),
|
||||
}),
|
||||
);
|
||||
expect(newStack!.assets.length).toEqual(4);
|
||||
});
|
||||
|
||||
it('should copy stack', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const {
|
||||
stack: { id: stackId },
|
||||
} = await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
const stack = await stackRepo.getById(stackId);
|
||||
expect(stack).toEqual(
|
||||
expect.objectContaining({
|
||||
primaryAssetId: oldAsset.id,
|
||||
assets: expect.arrayContaining([expect.objectContaining({ id: newAsset.id })]),
|
||||
}),
|
||||
);
|
||||
expect(stack!.assets.length).toEqual(3);
|
||||
});
|
||||
|
||||
it('should copy favorite status', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, isFavorite: true });
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
await expect(assetRepo.getById(newAsset.id)).resolves.toEqual(expect.objectContaining({ isFavorite: true }));
|
||||
});
|
||||
|
||||
it('should copy sidecar file', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const storageRepo = ctx.getMock(StorageRepository);
|
||||
const jobRepo = ctx.getMock(JobRepository);
|
||||
|
||||
storageRepo.copyFile.mockResolvedValue();
|
||||
jobRepo.queue.mockResolvedValue();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newAssetFile({
|
||||
assetId: oldAsset.id,
|
||||
path: '/path/to/my/sidecar.xmp',
|
||||
type: AssetFileType.Sidecar,
|
||||
});
|
||||
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
expect(storageRepo.copyFile).toHaveBeenCalledWith('/path/to/my/sidecar.xmp', `${newAsset.originalPath}.xmp`);
|
||||
|
||||
expect(jobRepo.queue).toHaveBeenCalledWith({
|
||||
name: JobName.AssetExtractMetadata,
|
||||
data: { id: newAsset.id },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete asset', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const thumbnailPath = '/path/to/thumbnail.jpg';
|
||||
const previewPath = '/path/to/preview.jpg';
|
||||
const sidecarPath = '/path/to/sidecar.xmp';
|
||||
await Promise.all([
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Thumbnail, path: thumbnailPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: previewPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }),
|
||||
]);
|
||||
|
||||
await sut.handleAssetDeletion({ id: asset.id, deleteOnDisk: true });
|
||||
|
||||
expect(ctx.getMock(JobRepository).queue).toHaveBeenCalledWith({
|
||||
name: JobName.FileDelete,
|
||||
data: { files: [thumbnailPath, previewPath, sidecarPath, asset.originalPath] },
|
||||
});
|
||||
});
|
||||
|
||||
it('should not delete offline assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id, isOffline: true });
|
||||
const thumbnailPath = '/path/to/thumbnail.jpg';
|
||||
const previewPath = '/path/to/preview.jpg';
|
||||
await Promise.all([
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Thumbnail, path: thumbnailPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: previewPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: `/path/to/sidecar.xmp` }),
|
||||
]);
|
||||
|
||||
await sut.handleAssetDeletion({ id: asset.id, deleteOnDisk: true });
|
||||
|
||||
expect(ctx.getMock(JobRepository).queue).toHaveBeenCalledWith({
|
||||
name: JobName.FileDelete,
|
||||
data: { files: [thumbnailPath, previewPath] },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
it('should automatically lock lockable columns', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: null });
|
||||
|
||||
await sut.update(auth, asset.id, {
|
||||
latitude: 42,
|
||||
longitude: 42,
|
||||
rating: 3,
|
||||
description: 'foo',
|
||||
dateTimeOriginal: '2023-11-19T18:11:00+01:00',
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({
|
||||
lockedProperties: ['timeZone', 'rating', 'description', 'latitude', 'longitude', 'dateTimeOriginal'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-19T18:11:00+00:00', timeZone: null }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal with time zone', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00', timeZone: 'UTC-7' }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateAll', () => {
|
||||
it('should automatically lock lockable columns', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: null });
|
||||
|
||||
await sut.updateAll(auth, {
|
||||
ids: [asset.id],
|
||||
latitude: 42,
|
||||
description: 'foo',
|
||||
longitude: 42,
|
||||
rating: 3,
|
||||
dateTimeOriginal: '2023-11-19T18:11:00+01:00',
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({
|
||||
lockedProperties: ['timeZone', 'rating', 'description', 'latitude', 'longitude', 'dateTimeOriginal'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should relatively update assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await sut.updateAll(auth, { ids: [asset.id], dateTimeRelative: -11 });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({
|
||||
dateTimeOriginal: '2023-11-19T18:00:00+00:00',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.updateAll(auth, { ids: [asset.id], dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-19T18:11:00+00:00', timeZone: null }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal with time zone', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.updateAll(auth, { ids: [asset.id], dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00', timeZone: 'UTC-7' }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('upsertBulkMetadata', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const items = [{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } }];
|
||||
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
|
||||
expect(metadata.length).toEqual(1);
|
||||
expect(metadata[0]).toEqual(
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } }),
|
||||
);
|
||||
});
|
||||
|
||||
it('should work on conflict', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'old-id' } });
|
||||
|
||||
// verify existing metadata
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'old-id' } }),
|
||||
]);
|
||||
|
||||
const items = [{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'new-id' } }];
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
// verify updated metadata
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'new-id' } }),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should work with multiple assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const items = [
|
||||
{ assetId: asset1.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
|
||||
{ assetId: asset2.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } },
|
||||
];
|
||||
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
const metadata1 = await ctx.get(AssetRepository).getMetadata(asset1.id);
|
||||
expect(metadata1).toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } }),
|
||||
]);
|
||||
|
||||
const metadata2 = await ctx.get(AssetRepository).getMetadata(asset2.id);
|
||||
expect(metadata2).toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } }),
|
||||
]);
|
||||
});
|
||||
|
||||
    // Upserting two entries with different keys for the same asset should store both.
    it('should work with multiple metadata for the same asset', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { asset } = await ctx.newAsset({ ownerId: user.id });

      const items = [
        { assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
        { assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } },
      ];

      await sut.upsertBulkMetadata(auth, { items });

      // arrayContaining: the order the repository returns rows in is not asserted.
      const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
      expect(metadata).toEqual(
        expect.arrayContaining([
          expect.objectContaining({
            key: AssetMetadataKey.MobileApp,
            value: { iCloudId: 'id1' },
          }),
          expect.objectContaining({
            key: 'some-other-key',
            value: { foo: 'bar' },
          }),
        ]),
      );
    });
|
||||
});
|
||||
|
||||
  describe('deleteBulkMetadata', () => {
    // Happy path: an existing (asset, key) row is removed.
    it('should work', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } });

      await sut.deleteBulkMetadata(auth, { items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }] });

      const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
      expect(metadata.length).toEqual(0);
    });

    // Deleting a key that was never written must not throw (idempotent delete).
    it('should work even if the item does not exist', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { asset } = await ctx.newAsset({ ownerId: user.id });

      await sut.deleteBulkMetadata(auth, { items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }] });

      const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
      expect(metadata.length).toEqual(0);
    });

    // One bulk call can target rows belonging to several assets.
    it('should work with multiple assets', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newMetadata({ assetId: asset1.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newMetadata({ assetId: asset2.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } });

      await sut.deleteBulkMetadata(auth, {
        items: [
          { assetId: asset1.id, key: AssetMetadataKey.MobileApp },
          { assetId: asset2.id, key: AssetMetadataKey.MobileApp },
        ],
      });

      await expect(ctx.get(AssetRepository).getMetadata(asset1.id)).resolves.toEqual([]);
      await expect(ctx.get(AssetRepository).getMetadata(asset2.id)).resolves.toEqual([]);
    });

    // One bulk call can remove several keys from the same asset.
    it('should work with multiple metadata for the same asset', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
      await ctx.newMetadata({ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } });

      await sut.deleteBulkMetadata(auth, {
        items: [
          { assetId: asset.id, key: AssetMetadataKey.MobileApp },
          { assetId: asset.id, key: 'some-other-key' },
        ],
      });

      await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([]);
    });

    // Deletion is key-scoped: rows under other keys survive.
    it('should not delete unspecified keys', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
      await ctx.newMetadata({ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } });

      await sut.deleteBulkMetadata(auth, {
        items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }],
      });

      const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
      expect(metadata).toEqual([expect.objectContaining({ key: 'some-other-key', value: { foo: 'bar' } })]);
    });
  });
|
||||
});
|
||||
84
server/test/medium/specs/services/audit.database.spec.ts
Normal file
84
server/test/medium/specs/services/audit.database.spec.ts
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { partner_delete_audit, stack_delete_audit } from 'src/schema/functions';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { MediumTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Database-level audit behavior: these tests exercise the SQL trigger functions
// (partner_delete_audit, stack_delete_audit, etc.) through real repositories.
describe('audit', () => {
  let ctx: MediumTestContext;

  beforeAll(async () => {
    // BaseService with no real repositories: the tests below talk to the
    // database directly via ctx.database / ctx.get(...).
    ctx = new MediumTestContext(BaseService, {
      database: await getKyselyDB(),
      real: [],
      mock: [LoggingRepository],
    });
  });

  describe(partner_delete_audit.name, () => {
    it('should not cascade user deletes to partners_audit', async () => {
      const partnerRepo = ctx.get(PartnerRepository);
      const userRepo = ctx.get(UserRepository);
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });
      // hard-delete (second arg true): the partner row disappears via FK cascade,
      // and that cascade must NOT leave an entry in partner_audit
      await userRepo.delete(user1, true);
      await expect(
        ctx.database.selectFrom('partner_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe(stack_delete_audit.name, () => {
    it('should not cascade user deletes to stacks_audit', async () => {
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
      await userRepo.delete(user, true);
      await expect(
        ctx.database.selectFrom('stack_audit').select(['id']).where('userId', '=', user.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe('assets_audit', () => {
    it('should not cascade user deletes to assets_audit', async () => {
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await userRepo.delete(user, true);
      await expect(
        ctx.database.selectFrom('asset_audit').select(['id']).where('assetId', '=', asset.id).execute(),
      ).resolves.toHaveLength(0);
    });
  });

  describe('exif', () => {
    // asset_exif rows should get fresh updatedAt/updateId on every update
    // (presumably via a trigger — confirm against the schema functions).
    it('should automatically set updatedAt and updateId when the row is updated', async () => {
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });

      const before = await ctx.database
        .selectFrom('asset_exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();

      // second upsert for the same asset acts as an update
      await ctx.newExif({ assetId: asset.id, make: 'Canon 2' });

      const after = await ctx.database
        .selectFrom('asset_exif')
        .select(['updatedAt', 'updateId'])
        .where('assetId', '=', asset.id)
        .executeTakeFirstOrThrow();

      expect(before.updateId).not.toEqual(after.updateId);
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });
});
|
||||
66
server/test/medium/specs/services/auth-admin.service.spec.ts
Normal file
66
server/test/medium/specs/services/auth-admin.service.spec.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AuthAdminService } from 'src/services/auth-admin.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AuthAdminService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [UserRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AuthAdminService.name, () => {
  describe('unlinkAll', () => {
    // Clearing OAuth links resets oauthId to the empty string for active users.
    it('should reset user.oauthId', async () => {
      const { sut, ctx } = setup();
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser({ oauthId: 'test-oauth-id' });
      const auth = factory.auth();

      await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
      await expect(userRepo.get(user.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
    });

    // Soft-deleted users are included in the reset as well.
    it('should reset a deleted user', async () => {
      const { sut, ctx } = setup();
      const userRepo = ctx.get(UserRepository);
      const { user } = await ctx.newUser({ oauthId: 'test-oauth-id', deletedAt: new Date() });
      const auth = factory.auth();

      await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
      await expect(userRepo.get(user.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
    });

    // The reset applies to every user at once, active or deleted.
    it('should reset multiple users', async () => {
      const { sut, ctx } = setup();
      const userRepo = ctx.get(UserRepository);
      const { user: user1 } = await ctx.newUser({ oauthId: '1' });
      const { user: user2 } = await ctx.newUser({ oauthId: '2', deletedAt: new Date() });
      const auth = factory.auth();

      await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
      await expect(userRepo.get(user1.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
      await expect(userRepo.get(user2.id, { withDeleted: true })).resolves.toEqual(
        expect.objectContaining({ oauthId: '' }),
      );
    });
  });
});
|
||||
166
server/test/medium/specs/services/auth.service.spec.ts
Normal file
166
server/test/medium/specs/services/auth.service.spec.ts
Normal file
|
|
@ -0,0 +1,166 @@
|
|||
import { BadRequestException } from '@nestjs/common';
|
||||
import { hash } from 'bcrypt';
|
||||
import { Kysely } from 'kysely';
|
||||
import { AuthType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { CryptoRepository } from 'src/repositories/crypto.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SessionRepository } from 'src/repositories/session.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AuthService } from 'src/services/auth.service';
|
||||
import { mediumFactory, newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AuthService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AccessRepository,
|
||||
ConfigRepository,
|
||||
CryptoRepository,
|
||||
DatabaseRepository,
|
||||
SessionRepository,
|
||||
SystemMetadataRepository,
|
||||
UserRepository,
|
||||
],
|
||||
mock: [LoggingRepository, StorageRepository, EventRepository, TelemetryRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AuthService.name, () => {
  describe('adminSignUp', () => {
    it(`should sign up the admin`, async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };

      await expect(sut.adminSignUp(dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          email: dto.email,
          name: dto.name,
          isAdmin: true,
        }),
      );
    });

    // Only one admin may exist; a second sign-up attempt is rejected.
    it('should not allow a second admin to sign up', async () => {
      const { sut, ctx } = setup();
      await ctx.newUser({ isAdmin: true });
      const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };

      const response = sut.adminSignUp(dto);
      await expect(response).rejects.toThrow(BadRequestException);
      await expect(response).rejects.toThrow('The server already has an admin');
    });
  });

  describe('login', () => {
    it('should reject an incorrect password', async () => {
      const { sut, ctx } = setup();
      const password = 'password';
      // users are stored with a bcrypt hash, so seed one explicitly
      const passwordHashed = await hash(password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const dto = { email: user.email, password: 'wrong-password' };

      await expect(sut.login(dto, mediumFactory.loginDetails())).rejects.toThrow('Incorrect email or password');
    });

    it('should accept a correct password and return a login response', async () => {
      const { sut, ctx } = setup();
      const password = 'password';
      const passwordHashed = await hash(password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const dto = { email: user.email, password };

      await expect(sut.login(dto, mediumFactory.loginDetails())).resolves.toEqual({
        accessToken: expect.any(String),
        isAdmin: user.isAdmin,
        isOnboarded: false,
        name: user.name,
        profileImagePath: user.profileImagePath,
        userId: user.id,
        userEmail: user.email,
        shouldChangePassword: user.shouldChangePassword,
      });
    });
  });

  describe('logout', () => {
    it('should logout', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
    });

    // Logging out with an active session must remove that session row.
    it('should cleanup the session', async () => {
      const { sut, ctx } = setup();
      const sessionRepo = ctx.get(SessionRepository);
      const eventRepo = ctx.getMock(EventRepository);
      const { user } = await ctx.newUser();
      const { session } = await ctx.newSession({ userId: user.id });
      const auth = factory.auth({ session, user });
      eventRepo.emit.mockResolvedValue();

      // sanity check: the session exists before logout
      await expect(sessionRepo.get(session.id)).resolves.toEqual(expect.objectContaining({ id: session.id }));
      await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
      await expect(sessionRepo.get(session.id)).resolves.toBeUndefined();
    });
  });

  describe('changePassword', () => {
    it('should change the password and login with it', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = { password: 'password', newPassword: 'new-password' };
      const passwordHashed = await hash(dto.password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const auth = factory.auth({ user });

      const response = await sut.changePassword(auth, dto);
      expect(response).toEqual(
        expect.objectContaining({
          id: user.id,
          email: user.email,
        }),
      );
      // the password hash must not leak into the response DTO
      expect((response as any).password).not.toBeDefined();

      // end-to-end: the new password is usable for login
      await expect(
        sut.login({ email: user.email, password: dto.newPassword }, mediumFactory.loginDetails()),
      ).resolves.toBeDefined();
    });

    it('should validate the current password', async () => {
      const { sut, ctx } = setup();
      const dto = { password: 'wrong-password', newPassword: 'new-password' };
      const passwordHashed = await hash('password', 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const auth = factory.auth({ user });

      const response = sut.changePassword(auth, dto);
      await expect(response).rejects.toThrow(BadRequestException);
      await expect(response).rejects.toThrow('Wrong password');
    });
  });
});
|
||||
246
server/test/medium/specs/services/memory.service.spec.ts
Normal file
246
server/test/medium/specs/services/memory.service.spec.ts
Normal file
|
|
@ -0,0 +1,246 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { AssetFileType, MemoryType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { MemoryRepository } from 'src/repositories/memory.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { MemoryService } from 'src/services/memory.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(MemoryService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AccessRepository,
|
||||
AssetRepository,
|
||||
DatabaseRepository,
|
||||
MemoryRepository,
|
||||
UserRepository,
|
||||
SystemMetadataRepository,
|
||||
UserRepository,
|
||||
PartnerRepository,
|
||||
],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
describe(MemoryService.name, () => {
  beforeEach(async () => {
    // NOTE(review): unlike the sibling specs (which use beforeAll), this
    // recreates the database before every test — confirm the per-test
    // isolation is intentional, since it is noticeably slower.
    defaultDatabase = await getKyselyDB();
  });

  describe('create', () => {
    it('should create a new memory', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        // NOTE(review): new Date(2021) is 2021 ms after the epoch, not the
        // year 2021 — presumably any timestamp works here; confirm.
        memoryAt: new Date(2021),
      };

      await expect(sut.create(auth, dto)).resolves.toEqual({
        id: expect.any(String),
        type: dto.type,
        data: dto.data,
        createdAt: expect.any(Date),
        updatedAt: expect.any(Date),
        isSaved: false,
        memoryAt: dto.memoryAt,
        ownerId: user.id,
        assets: [],
      });
    });

    it('should create a new memory (with assets)', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
      const auth = factory.auth({ user });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
        assetIds: [asset1.id, asset2.id],
      };

      await expect(sut.create(auth, dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          assets: [expect.objectContaining({ id: asset1.id }), expect.objectContaining({ id: asset2.id })],
        }),
      );
    });

    // Assets owned by another user are silently dropped, not rejected.
    it('should create a new memory and ignore assets the user does not have access to', async () => {
      const { sut, ctx } = setup();
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user1.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
      const auth = factory.auth({ user: user1 });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
        assetIds: [asset1.id, asset2.id],
      };

      await expect(sut.create(auth, dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          assets: [expect.objectContaining({ id: asset1.id })],
        }),
      );
    });
  });

  describe('onMemoryCreate', () => {
    it('should work on an empty database', async () => {
      const { sut } = setup();
      await expect(sut.onMemoriesCreate()).resolves.not.toThrow();
    });

    it('should create a memory from an asset', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      // seed a fully-processed asset dated exactly one year ago
      const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
      await Promise.all([
        ctx.newExif({ assetId: asset.id, make: 'Canon' }),
        ctx.newJobStatus({ assetId: asset.id }),
        assetRepo.upsertFiles([
          { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
          { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
        ]),
      ]);

      // NOTE(review): system time is faked but never restored here — confirm a
      // global afterEach resets timers, otherwise later tests inherit this clock.
      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();

      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      expect(memories[0]).toEqual(
        expect.objectContaining({
          id: expect.any(String),
          createdAt: expect.any(Date),
          memoryAt: expect.any(Date),
          updatedAt: expect.any(Date),
          deletedAt: null,
          ownerId: user.id,
          assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
          isSaved: false,
          showAt: now.startOf('day').toJSDate(),
          hideAt: now.endOf('day').toJSDate(),
          seenAt: null,
          type: 'on_this_day',
          data: { year: 2024 },
        }),
      );
    });

    // Same as above, but the memory day (Feb 26) is after "now" — the memory
    // is still generated ahead of time.
    it('should create a memory from an asset - in advance', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2035, month: 2, day: 26 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
      await Promise.all([
        ctx.newExif({ assetId: asset.id, make: 'Canon' }),
        ctx.newJobStatus({ assetId: asset.id }),
        assetRepo.upsertFiles([
          { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
          { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
        ]),
      ]);

      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();

      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      expect(memories[0]).toEqual(
        expect.objectContaining({
          id: expect.any(String),
          createdAt: expect.any(Date),
          memoryAt: expect.any(Date),
          updatedAt: expect.any(Date),
          deletedAt: null,
          ownerId: user.id,
          assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
          isSaved: false,
          showAt: now.startOf('day').toJSDate(),
          hideAt: now.endOf('day').toJSDate(),
          seenAt: null,
          type: 'on_this_day',
          data: { year: 2034 },
        }),
      );
    });

    // Running the job twice must be idempotent: one memory per day, not two.
    it('should not generate a memory twice for the same day', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      for (const dto of [
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 3 }).toISO(),
        },
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 4 }).toISO(),
        },
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 5 }).toISO(),
        },
      ]) {
        const { asset } = await ctx.newAsset(dto);
        await Promise.all([
          ctx.newExif({ assetId: asset.id, make: 'Canon' }),
          ctx.newJobStatus({ assetId: asset.id }),
          assetRepo.upsertFiles([
            { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
            { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
          ]),
        ]);
      }

      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();

      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);

      await sut.onMemoriesCreate();

      const memoriesAfter = await memoryRepo.search(user.id, {});
      expect(memoriesAfter.length).toBe(1);
    });
  });

  describe('onMemoriesCleanup', () => {
    it('should run without error', async () => {
      const { sut } = setup();
      await expect(sut.onMemoriesCleanup()).resolves.not.toThrow();
    });
  });
});
|
||||
108
server/test/medium/specs/services/metadata.service.spec.ts
Normal file
108
server/test/medium/specs/services/metadata.service.spec.ts
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
import { Stats } from 'node:fs';
|
||||
import { writeFile } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { MetadataRepository } from 'src/repositories/metadata.repository';
|
||||
import { MetadataService } from 'src/services/metadata.service';
|
||||
import { automock, newRandomImage, newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
// A real MetadataRepository shared by every test below, constructed with an
// automocked logger. The sparse array leaves the first constructor argument
// of LoggingRepository undefined on purpose.
const metadataRepository = new MetadataRepository(
  // eslint-disable-next-line no-sparse-arrays
  automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }),
);
|
||||
|
||||
const createTestFile = async (exifData: Record<string, any>) => {
|
||||
const data = newRandomImage();
|
||||
const filePath = join(tmpdir(), 'test.png');
|
||||
await writeFile(filePath, data);
|
||||
await metadataRepository.writeTags(filePath, exifData);
|
||||
return { filePath };
|
||||
};
|
||||
|
||||
// One table-driven case for the time-zone handling tests below.
type TimeZoneTest = {
  description: string;
  // Value for process.env.TZ while the test runs; when omitted, TZ stays unset.
  serverTimeZone?: string;
  // EXIF tags written to the test image before extraction.
  exifData: Record<string, any>;
  expected: {
    localDateTime: string;
    dateTimeOriginal: string;
    // null when the source file carries no time-zone information
    timeZone: string | null;
  };
};
|
||||
|
||||
describe(MetadataService.name, () => {
|
||||
let sut: MetadataService;
|
||||
let mocks: ServiceMocks;
|
||||
|
||||
beforeEach(() => {
|
||||
({ sut, mocks } = newTestService(MetadataService, { metadata: metadataRepository }));
|
||||
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: new Date(654_321),
|
||||
mtimeMs: 654_321,
|
||||
birthtimeMs: 654_322,
|
||||
} as Stats);
|
||||
|
||||
delete process.env.TZ;
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
describe('handleMetadataExtraction', () => {
|
||||
const timeZoneTests: TimeZoneTest[] = [
|
||||
{
|
||||
description: 'should handle no time zone information',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2022:01:01 00:00:00',
|
||||
},
|
||||
expected: {
|
||||
localDateTime: '2022-01-01T00:00:00.000Z',
|
||||
dateTimeOriginal: '2022-01-01T00:00:00.000Z',
|
||||
timeZone: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: 'should handle a +13:00 time zone',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2022:01:01 00:00:00+13:00',
|
||||
},
|
||||
expected: {
|
||||
localDateTime: '2022-01-01T00:00:00.000Z',
|
||||
dateTimeOriginal: '2021-12-31T11:00:00.000Z',
|
||||
timeZone: 'UTC+13',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
it.each(timeZoneTests)('$description', async ({ exifData, serverTimeZone, expected }) => {
|
||||
process.env.TZ = serverTimeZone ?? undefined;
|
||||
|
||||
const { filePath } = await createTestFile(exifData);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
|
||||
id: 'asset-1',
|
||||
originalPath: filePath,
|
||||
files: [],
|
||||
} as any);
|
||||
|
||||
await sut.handleMetadataExtraction({ id: 'asset-1' });
|
||||
|
||||
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
dateTimeOriginal: new Date(expected.dateTimeOriginal),
|
||||
timeZone: expected.timeZone,
|
||||
}),
|
||||
{ lockedPropertiesBehavior: 'skip' },
|
||||
);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
localDateTime: new Date(expected.localDateTime),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
249
server/test/medium/specs/services/ocr.service.spec.ts
Normal file
249
server/test/medium/specs/services/ocr.service.spec.ts
Normal file
|
|
@ -0,0 +1,249 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetFileType, JobStatus } from 'src/enum';
|
||||
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
|
||||
import { OcrRepository } from 'src/repositories/ocr.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { OcrService } from 'src/services/ocr.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(OcrService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AssetRepository, AssetJobRepository, ConfigRepository, OcrRepository, SystemMetadataRepository],
|
||||
mock: [JobRepository, LoggingRepository, MachineLearningRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(OcrService.name, () => {
|
||||
it('should work', () => {
|
||||
const { sut } = setup();
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
it('should parse asset', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
|
||||
|
||||
const machineLearningMock = ctx.getMock(MachineLearningRepository);
|
||||
machineLearningMock.ocr.mockResolvedValue({
|
||||
box: [10, 10, 50, 10, 50, 50, 10, 50],
|
||||
boxScore: [0.99],
|
||||
text: ['Test OCR'],
|
||||
textScore: [0.95],
|
||||
});
|
||||
|
||||
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
const ocrRepository = ctx.get(OcrRepository);
|
||||
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
|
||||
{
|
||||
assetId: asset.id,
|
||||
boxScore: 0.99,
|
||||
id: expect.any(String),
|
||||
text: 'Test OCR',
|
||||
textScore: 0.95,
|
||||
isVisible: true,
|
||||
x1: 10,
|
||||
y1: 10,
|
||||
x2: 50,
|
||||
y2: 10,
|
||||
x3: 50,
|
||||
y3: 50,
|
||||
x4: 10,
|
||||
y4: 50,
|
||||
},
|
||||
]);
|
||||
await expect(
|
||||
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
|
||||
).resolves.toEqual({
|
||||
assetId: asset.id,
|
||||
text: 'Test OCR',
|
||||
});
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_job_status')
|
||||
.select('asset_job_status.ocrAt')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirst(),
|
||||
).resolves.toEqual({ ocrAt: expect.any(Date) });
|
||||
});
|
||||
|
||||
it('should handle multiple boxes', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
|
||||
|
||||
const machineLearningMock = ctx.getMock(MachineLearningRepository);
|
||||
machineLearningMock.ocr.mockResolvedValue({
|
||||
box: Array.from({ length: 8 * 5 }, (_, i) => i),
|
||||
boxScore: [0.7, 0.67, 0.65, 0.62, 0.6],
|
||||
text: ['One', 'Two', 'Three', 'Four', 'Five'],
|
||||
textScore: [0.9, 0.89, 0.88, 0.87, 0.86],
|
||||
});
|
||||
|
||||
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
const ocrRepository = ctx.get(OcrRepository);
|
||||
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
|
||||
{
|
||||
assetId: asset.id,
|
||||
boxScore: 0.7,
|
||||
id: expect.any(String),
|
||||
text: 'One',
|
||||
textScore: 0.9,
|
||||
isVisible: true,
|
||||
x1: 0,
|
||||
y1: 1,
|
||||
x2: 2,
|
||||
y2: 3,
|
||||
x3: 4,
|
||||
y3: 5,
|
||||
x4: 6,
|
||||
y4: 7,
|
||||
},
|
||||
{
|
||||
assetId: asset.id,
|
||||
boxScore: 0.67,
|
||||
id: expect.any(String),
|
||||
text: 'Two',
|
||||
textScore: 0.89,
|
||||
isVisible: true,
|
||||
x1: 8,
|
||||
y1: 9,
|
||||
x2: 10,
|
||||
y2: 11,
|
||||
x3: 12,
|
||||
y3: 13,
|
||||
x4: 14,
|
||||
y4: 15,
|
||||
},
|
||||
{
|
||||
assetId: asset.id,
|
||||
boxScore: 0.65,
|
||||
id: expect.any(String),
|
||||
text: 'Three',
|
||||
textScore: 0.88,
|
||||
isVisible: true,
|
||||
x1: 16,
|
||||
y1: 17,
|
||||
x2: 18,
|
||||
y2: 19,
|
||||
x3: 20,
|
||||
y3: 21,
|
||||
x4: 22,
|
||||
y4: 23,
|
||||
},
|
||||
{
|
||||
assetId: asset.id,
|
||||
boxScore: 0.62,
|
||||
id: expect.any(String),
|
||||
text: 'Four',
|
||||
textScore: 0.87,
|
||||
isVisible: true,
|
||||
x1: 24,
|
||||
y1: 25,
|
||||
x2: 26,
|
||||
y2: 27,
|
||||
x3: 28,
|
||||
y3: 29,
|
||||
x4: 30,
|
||||
y4: 31,
|
||||
},
|
||||
{
|
||||
assetId: asset.id,
|
||||
boxScore: 0.6,
|
||||
id: expect.any(String),
|
||||
text: 'Five',
|
||||
textScore: 0.86,
|
||||
isVisible: true,
|
||||
x1: 32,
|
||||
y1: 33,
|
||||
x2: 34,
|
||||
y2: 35,
|
||||
x3: 36,
|
||||
y3: 37,
|
||||
x4: 38,
|
||||
y4: 39,
|
||||
},
|
||||
]);
|
||||
await expect(
|
||||
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
|
||||
).resolves.toEqual({
|
||||
assetId: asset.id,
|
||||
text: 'One Two Three Four Five',
|
||||
});
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_job_status')
|
||||
.select('asset_job_status.ocrAt')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirst(),
|
||||
).resolves.toEqual({ ocrAt: expect.any(Date) });
|
||||
});
|
||||
|
||||
it('should handle no boxes', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
|
||||
|
||||
const machineLearningMock = ctx.getMock(MachineLearningRepository);
|
||||
machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
|
||||
|
||||
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
const ocrRepository = ctx.get(OcrRepository);
|
||||
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
|
||||
await expect(
|
||||
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
|
||||
).resolves.toBeUndefined();
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_job_status')
|
||||
.select('asset_job_status.ocrAt')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirst(),
|
||||
).resolves.toEqual({ ocrAt: expect.any(Date) });
|
||||
});
|
||||
|
||||
it('should update existing results', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
|
||||
|
||||
const machineLearningMock = ctx.getMock(MachineLearningRepository);
|
||||
machineLearningMock.ocr.mockResolvedValue({
|
||||
box: [10, 10, 50, 10, 50, 50, 10, 50],
|
||||
boxScore: [0.99],
|
||||
text: ['Test OCR'],
|
||||
textScore: [0.95],
|
||||
});
|
||||
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
|
||||
await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);
|
||||
|
||||
const ocrRepository = ctx.get(OcrRepository);
|
||||
await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
|
||||
await expect(
|
||||
ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
|
||||
).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
80
server/test/medium/specs/services/person.service.spec.ts
Normal file
80
server/test/medium/specs/services/person.service.spec.ts
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { PersonService } from 'src/services/person.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(PersonService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AccessRepository, DatabaseRepository, PersonRepository],
|
||||
mock: [LoggingRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(PersonService.name, () => {
|
||||
describe('delete', () => {
|
||||
it('should throw an error when there is no access', async () => {
|
||||
const { sut } = setup();
|
||||
const auth = factory.auth();
|
||||
const personId = factory.uuid();
|
||||
await expect(sut.delete(auth, personId)).rejects.toThrow('Not found or no person.delete access');
|
||||
});
|
||||
|
||||
it('should delete the person', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const personRepo = ctx.get(PersonRepository);
|
||||
const storageMock = ctx.getMock(StorageRepository);
|
||||
const { user } = await ctx.newUser();
|
||||
const { person } = await ctx.newPerson({ ownerId: user.id });
|
||||
const auth = factory.auth({ user });
|
||||
storageMock.unlink.mockResolvedValue();
|
||||
|
||||
await expect(personRepo.getById(person.id)).resolves.toEqual(expect.objectContaining({ id: person.id }));
|
||||
await expect(sut.delete(auth, person.id)).resolves.toBeUndefined();
|
||||
await expect(personRepo.getById(person.id)).resolves.toBeUndefined();
|
||||
|
||||
expect(storageMock.unlink).toHaveBeenCalledWith(person.thumbnailPath);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteAll', () => {
|
||||
it('should throw an error when there is no access', async () => {
|
||||
const { sut } = setup();
|
||||
const auth = factory.auth();
|
||||
const personId = factory.uuid();
|
||||
await expect(sut.deleteAll(auth, { ids: [personId] })).rejects.toThrow('Not found or no person.delete access');
|
||||
});
|
||||
|
||||
it('should delete the person', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const storageMock = ctx.getMock(StorageRepository);
|
||||
const personRepo = ctx.get(PersonRepository);
|
||||
const { user } = await ctx.newUser();
|
||||
const { person: person1 } = await ctx.newPerson({ ownerId: user.id });
|
||||
const { person: person2 } = await ctx.newPerson({ ownerId: user.id });
|
||||
const auth = factory.auth({ user });
|
||||
storageMock.unlink.mockResolvedValue();
|
||||
|
||||
await expect(sut.deleteAll(auth, { ids: [person1.id, person2.id] })).resolves.toBeUndefined();
|
||||
await expect(personRepo.getById(person1.id)).resolves.toBeUndefined();
|
||||
await expect(personRepo.getById(person2.id)).resolves.toBeUndefined();
|
||||
|
||||
expect(storageMock.unlink).toHaveBeenCalledTimes(2);
|
||||
expect(storageMock.unlink).toHaveBeenCalledWith(person1.thumbnailPath);
|
||||
expect(storageMock.unlink).toHaveBeenCalledWith(person2.thumbnailPath);
|
||||
});
|
||||
});
|
||||
});
|
||||
308
server/test/medium/specs/services/plugin.service.spec.ts
Normal file
308
server/test/medium/specs/services/plugin.service.spec.ts
Normal file
|
|
@ -0,0 +1,308 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { PluginContext } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PluginRepository } from 'src/repositories/plugin.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { PluginService } from 'src/services/plugin.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
let pluginRepo: PluginRepository;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(PluginService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [PluginRepository, AccessRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
pluginRepo = new PluginRepository(defaultDatabase);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
await defaultDatabase.deleteFrom('plugin').execute();
|
||||
});
|
||||
|
||||
describe(PluginService.name, () => {
|
||||
describe('getAll', () => {
|
||||
it('should return empty array when no plugins exist', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
const plugins = await sut.getAll();
|
||||
|
||||
expect(plugins).toEqual([]);
|
||||
});
|
||||
|
||||
it('should return plugin without filters and actions', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'test-plugin',
|
||||
title: 'Test Plugin',
|
||||
description: 'A test plugin',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/path/to/test.wasm' },
|
||||
},
|
||||
'/test/base/path',
|
||||
);
|
||||
|
||||
const plugins = await sut.getAll();
|
||||
|
||||
expect(plugins).toHaveLength(1);
|
||||
expect(plugins[0]).toMatchObject({
|
||||
id: result.plugin.id,
|
||||
name: 'test-plugin',
|
||||
description: 'A test plugin',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return plugin with filters and actions', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'full-plugin',
|
||||
title: 'Full Plugin',
|
||||
description: 'A plugin with filters and actions',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/path/to/full.wasm' },
|
||||
filters: [
|
||||
{
|
||||
methodName: 'test-filter',
|
||||
title: 'Test Filter',
|
||||
description: 'A test filter',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: { type: 'object', properties: {} },
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
methodName: 'test-action',
|
||||
title: 'Test Action',
|
||||
description: 'A test action',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: { type: 'object', properties: {} },
|
||||
},
|
||||
],
|
||||
},
|
||||
'/test/base/path',
|
||||
);
|
||||
|
||||
const plugins = await sut.getAll();
|
||||
|
||||
expect(plugins).toHaveLength(1);
|
||||
expect(plugins[0]).toMatchObject({
|
||||
id: result.plugin.id,
|
||||
name: 'full-plugin',
|
||||
filters: [
|
||||
{
|
||||
id: result.filters[0].id,
|
||||
pluginId: result.plugin.id,
|
||||
methodName: 'test-filter',
|
||||
title: 'Test Filter',
|
||||
description: 'A test filter',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: { type: 'object', properties: {} },
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
id: result.actions[0].id,
|
||||
pluginId: result.plugin.id,
|
||||
methodName: 'test-action',
|
||||
title: 'Test Action',
|
||||
description: 'A test action',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: { type: 'object', properties: {} },
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('should return multiple plugins with their respective filters and actions', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'plugin-1',
|
||||
title: 'Plugin 1',
|
||||
description: 'First plugin',
|
||||
author: 'Author 1',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/path/to/plugin1.wasm' },
|
||||
filters: [
|
||||
{
|
||||
methodName: 'filter-1',
|
||||
title: 'Filter 1',
|
||||
description: 'Filter for plugin 1',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/test/base/path',
|
||||
);
|
||||
|
||||
await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'plugin-2',
|
||||
title: 'Plugin 2',
|
||||
description: 'Second plugin',
|
||||
author: 'Author 2',
|
||||
version: '2.0.0',
|
||||
wasm: { path: '/path/to/plugin2.wasm' },
|
||||
actions: [
|
||||
{
|
||||
methodName: 'action-2',
|
||||
title: 'Action 2',
|
||||
description: 'Action for plugin 2',
|
||||
supportedContexts: [PluginContext.Album],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/test/base/path',
|
||||
);
|
||||
|
||||
const plugins = await sut.getAll();
|
||||
|
||||
expect(plugins).toHaveLength(2);
|
||||
expect(plugins[0].name).toBe('plugin-1');
|
||||
expect(plugins[0].filters).toHaveLength(1);
|
||||
expect(plugins[0].actions).toHaveLength(0);
|
||||
|
||||
expect(plugins[1].name).toBe('plugin-2');
|
||||
expect(plugins[1].filters).toHaveLength(0);
|
||||
expect(plugins[1].actions).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should handle plugin with multiple filters and actions', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'multi-plugin',
|
||||
title: 'Multi Plugin',
|
||||
description: 'Plugin with multiple items',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/path/to/multi.wasm' },
|
||||
filters: [
|
||||
{
|
||||
methodName: 'filter-a',
|
||||
title: 'Filter A',
|
||||
description: 'First filter',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
{
|
||||
methodName: 'filter-b',
|
||||
title: 'Filter B',
|
||||
description: 'Second filter',
|
||||
supportedContexts: [PluginContext.Album],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
methodName: 'action-x',
|
||||
title: 'Action X',
|
||||
description: 'First action',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
{
|
||||
methodName: 'action-y',
|
||||
title: 'Action Y',
|
||||
description: 'Second action',
|
||||
supportedContexts: [PluginContext.Person],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/test/base/path',
|
||||
);
|
||||
|
||||
const plugins = await sut.getAll();
|
||||
|
||||
expect(plugins).toHaveLength(1);
|
||||
expect(plugins[0].filters).toHaveLength(2);
|
||||
expect(plugins[0].actions).toHaveLength(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('get', () => {
|
||||
it('should throw error when plugin does not exist', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
await expect(sut.get('00000000-0000-0000-0000-000000000000')).rejects.toThrow('Plugin not found');
|
||||
});
|
||||
|
||||
it('should return single plugin with filters and actions', async () => {
|
||||
const { sut } = setup();
|
||||
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'single-plugin',
|
||||
title: 'Single Plugin',
|
||||
description: 'A single plugin',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/path/to/single.wasm' },
|
||||
filters: [
|
||||
{
|
||||
methodName: 'single-filter',
|
||||
title: 'Single Filter',
|
||||
description: 'A single filter',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
methodName: 'single-action',
|
||||
title: 'Single Action',
|
||||
description: 'A single action',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/test/base/path',
|
||||
);
|
||||
|
||||
const pluginResult = await sut.get(result.plugin.id);
|
||||
|
||||
expect(pluginResult).toMatchObject({
|
||||
id: result.plugin.id,
|
||||
name: 'single-plugin',
|
||||
filters: [
|
||||
{
|
||||
id: result.filters[0].id,
|
||||
methodName: 'single-filter',
|
||||
title: 'Single Filter',
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
id: result.actions[0].id,
|
||||
methodName: 'single-action',
|
||||
title: 'Single Action',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
91
server/test/medium/specs/services/search.service.spec.ts
Normal file
91
server/test/medium/specs/services/search.service.spec.ts
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { SearchRepository } from 'src/repositories/search.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SearchService } from 'src/services/search.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(SearchService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AccessRepository,
|
||||
AssetRepository,
|
||||
DatabaseRepository,
|
||||
SearchRepository,
|
||||
PartnerRepository,
|
||||
PersonRepository,
|
||||
],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SearchService.name, () => {
|
||||
it('should work', () => {
|
||||
const { sut } = setup();
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
it('should return assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
|
||||
const assets = [];
|
||||
const sizes = [12_334, 599, 123_456];
|
||||
|
||||
for (let i = 0; i < sizes.length; i++) {
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: sizes[i] });
|
||||
assets.push(asset);
|
||||
}
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
await expect(sut.searchLargeAssets(auth, {})).resolves.toEqual([
|
||||
expect.objectContaining({ id: assets[2].id }),
|
||||
expect.objectContaining({ id: assets[0].id }),
|
||||
expect.objectContaining({ id: assets[1].id }),
|
||||
]);
|
||||
});
|
||||
|
||||
describe('searchStatistics', () => {
|
||||
it('should return statistics when filtering by personIds', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { person } = await ctx.newPerson({ ownerId: user.id });
|
||||
await ctx.newAssetFace({ assetId: asset.id, personId: person.id });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
const result = await sut.searchStatistics(auth, { personIds: [person.id] });
|
||||
|
||||
expect(result).toEqual({ total: 1 });
|
||||
});
|
||||
|
||||
it('should return zero when no assets match the personIds filter', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { person } = await ctx.newPerson({ ownerId: user.id });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
const result = await sut.searchStatistics(auth, { personIds: [person.id] });
|
||||
|
||||
expect(result).toEqual({ total: 0 });
|
||||
});
|
||||
});
|
||||
});
|
||||
127
server/test/medium/specs/services/shared-link.service.spec.ts
Normal file
127
server/test/medium/specs/services/shared-link.service.spec.ts
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { SharedLinkType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
|
||||
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SharedLinkService } from 'src/services/shared-link.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(SharedLinkService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AccessRepository, DatabaseRepository, SharedLinkRepository, SharedLinkAssetRepository],
|
||||
mock: [LoggingRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SharedLinkService.name, () => {
|
||||
describe('get', () => {
|
||||
it('should return the correct dates on the shared link album', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { album } = await ctx.newAlbum({ ownerId: user.id });
|
||||
|
||||
const dates = ['2021-01-01T00:00:00.000Z', '2022-01-01T00:00:00.000Z', '2020-01-01T00:00:00.000Z'];
|
||||
|
||||
for (const date of dates) {
|
||||
const { asset } = await ctx.newAsset({ fileCreatedAt: date, localDateTime: date, ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
|
||||
}
|
||||
|
||||
const sharedLinkRepo = ctx.get(SharedLinkRepository);
|
||||
|
||||
const sharedLink = await sharedLinkRepo.create({
|
||||
key: randomBytes(16),
|
||||
id: factory.uuid(),
|
||||
userId: user.id,
|
||||
albumId: album.id,
|
||||
allowUpload: true,
|
||||
type: SharedLinkType.Album,
|
||||
});
|
||||
|
||||
await expect(sut.get(auth, sharedLink.id)).resolves.toMatchObject({
|
||||
album: expect.objectContaining({
|
||||
startDate: '2020-01-01T00:00:00+00:00',
|
||||
endDate: '2022-01-01T00:00:00+00:00',
|
||||
}),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('should share individually assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
|
||||
const assets = await Promise.all([
|
||||
ctx.newAsset({ ownerId: user.id }),
|
||||
ctx.newAsset({ ownerId: user.id }),
|
||||
ctx.newAsset({ ownerId: user.id }),
|
||||
]);
|
||||
|
||||
for (const { asset } of assets) {
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
}
|
||||
|
||||
const sharedLinkRepo = ctx.get(SharedLinkRepository);
|
||||
|
||||
const sharedLink = await sharedLinkRepo.create({
|
||||
key: randomBytes(16),
|
||||
id: factory.uuid(),
|
||||
userId: user.id,
|
||||
allowUpload: false,
|
||||
type: SharedLinkType.Individual,
|
||||
assetIds: assets.map(({ asset }) => asset.id),
|
||||
});
|
||||
|
||||
await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
|
||||
assets: assets.map(({ asset }) => expect.objectContaining({ id: asset.id })),
|
||||
});
|
||||
});
|
||||
|
||||
it('should remove individually shared asset', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
|
||||
const sharedLinkRepo = ctx.get(SharedLinkRepository);
|
||||
|
||||
const sharedLink = await sharedLinkRepo.create({
|
||||
key: randomBytes(16),
|
||||
id: factory.uuid(),
|
||||
userId: user.id,
|
||||
allowUpload: false,
|
||||
type: SharedLinkType.Individual,
|
||||
assetIds: [asset.id],
|
||||
});
|
||||
|
||||
await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
|
||||
assets: [expect.objectContaining({ id: asset.id })],
|
||||
});
|
||||
|
||||
await sut.removeAssets(auth, sharedLink.id, {
|
||||
assetIds: [asset.id],
|
||||
});
|
||||
|
||||
await expect(sut.getMine({ user, sharedLink }, {})).resolves.toHaveProperty('assets', []);
|
||||
});
|
||||
});
|
||||
46
server/test/medium/specs/services/storage.service.spec.ts
Normal file
46
server/test/medium/specs/services/storage.service.spec.ts
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { StorageService } from 'src/services/storage.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { mockEnvData } from 'test/repositories/config.repository.mock';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(StorageService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AssetRepository, DatabaseRepository, SystemMetadataRepository],
|
||||
mock: [StorageRepository, ConfigRepository, LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(StorageService.name, () => {
|
||||
describe('onBoostrap', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const configMock = ctx.getMock(ConfigRepository);
|
||||
configMock.getEnv.mockReturnValue(mockEnvData({}));
|
||||
|
||||
const storageMock = ctx.getMock(StorageRepository);
|
||||
storageMock.mkdirSync.mockReturnValue(void 0);
|
||||
storageMock.existsSync.mockReturnValue(true);
|
||||
storageMock.createFile.mockResolvedValue(void 0);
|
||||
storageMock.overwriteFile.mockResolvedValue(void 0);
|
||||
storageMock.readFile.mockResolvedValue(Buffer.from('test content'));
|
||||
|
||||
await expect(sut.onBootstrap()).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
226
server/test/medium/specs/services/sync.service.spec.ts
Normal file
226
server/test/medium/specs/services/sync.service.spec.ts
Normal file
|
|
@ -0,0 +1,226 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { AssetMetadataKey, UserMetadataKey } from 'src/enum';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SyncRepository } from 'src/repositories/sync.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncService } from 'src/services/sync.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
import { v4 } from 'uuid';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(SyncService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [DatabaseRepository, SyncRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Acquire the shared database connection once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
const deletedLongAgo = DateTime.now().minus({ days: 35 }).toISO();
|
||||
|
||||
const assertTableCount = async <T extends keyof DB>(db: Kysely<DB>, t: T, count: number) => {
|
||||
const { table } = db.dynamic;
|
||||
const results = await db.selectFrom(table(t).as(t)).selectAll().execute();
|
||||
expect(results).toHaveLength(count);
|
||||
};
|
||||
|
||||
describe(SyncService.name, () => {
  describe('onAuditTableCleanup', () => {
    // Each table-specific test follows the same pattern: insert one audit row
    // deleted 35 days ago, run cleanup, and verify the row is gone.
    it('should work', async () => {
      const { sut } = setup();
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
    });

    it('should cleanup the album_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_audit';

      await ctx.database
        .insertInto(tableName)
        .values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the album_asset_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_asset_audit';
      // A real album is required here — presumably albumId carries a foreign
      // key constraint on this table (TODO confirm against schema).
      const { user } = await ctx.newUser();
      const { album } = await ctx.newAlbum({ ownerId: user.id });
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: album.id, assetId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the album_user_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_user_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the asset_audit table', async () => {
      const { sut, ctx } = setup();

      await ctx.database
        .insertInto('asset_audit')
        .values({ assetId: v4(), ownerId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, 'asset_audit', 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, 'asset_audit', 0);
    });

    it('should cleanup the asset_face_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'asset_face_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ assetFaceId: v4(), assetId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the asset_metadata_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'asset_metadata_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ assetId: v4(), key: AssetMetadataKey.MobileApp, deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the memory_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'memory_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ memoryId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the memory_asset_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'memory_asset_audit';
      // A real memory is created — presumably memoryId is a foreign key on
      // this audit table (TODO confirm against schema).
      const { user } = await ctx.newUser();
      const { memory } = await ctx.newMemory({ ownerId: user.id });
      await ctx.database
        .insertInto(tableName)
        .values({ memoryId: memory.id, assetId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the partner_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'partner_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ sharedById: v4(), sharedWithId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the stack_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'stack_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ stackId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the user_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'user_audit';
      await ctx.database.insertInto(tableName).values({ userId: v4(), deletedAt: deletedLongAgo }).execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the user_metadata_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'user_metadata_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ userId: v4(), key: UserMetadataKey.Onboarding, deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    // Cleanup removes only the 35-day-old row; the 25-day-old row survives —
    // consistent with a 30-day retention window (TODO confirm against the
    // service's configured retention).
    it('should skip recent records', async () => {
      const { sut, ctx } = setup();

      const keep = {
        id: v4(),
        assetId: v4(),
        ownerId: v4(),
        deletedAt: DateTime.now().minus({ days: 25 }).toISO(),
      };

      const remove = {
        id: v4(),
        assetId: v4(),
        ownerId: v4(),
        deletedAt: DateTime.now().minus({ days: 35 }).toISO(),
      };

      await ctx.database.insertInto('asset_audit').values([keep, remove]).execute();
      await assertTableCount(ctx.database, 'asset_audit', 2);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();

      const after = await ctx.database.selectFrom('asset_audit').select(['id']).execute();
      expect(after).toHaveLength(1);
      expect(after[0].id).toBe(keep.id);
    });
  });
});
|
||||
145
server/test/medium/specs/services/tag.service.spec.ts
Normal file
145
server/test/medium/specs/services/tag.service.spec.ts
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { JobStatus } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { TagRepository } from 'src/repositories/tag.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { TagService } from 'src/services/tag.service';
|
||||
import { upsertTags } from 'src/utils/tag';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(TagService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AssetRepository, TagRepository, AccessRepository],
|
||||
mock: [EventRepository, LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Acquire the shared database connection once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(TagService.name, () => {
  describe('addAssets', () => {
    it('should lock exif column', async () => {
      const { sut, ctx } = setup();
      // addAssets emits an event; resolve the mocked emit so the call succeeds.
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const [tag] = await upsertTags(ctx.get(TagRepository), { userId: user.id, tags: ['tag-1'] });
      const authDto = factory.auth({ user });

      await sut.addAssets(authDto, tag.id, { ids: [asset.id] });
      // Tagging writes the tag into asset_exif and records 'tags' as a locked
      // property on that row.
      await expect(
        ctx.database
          .selectFrom('asset_exif')
          .select(['lockedProperties', 'tags'])
          .where('assetId', '=', asset.id)
          .executeTakeFirstOrThrow(),
      ).resolves.toEqual({
        lockedProperties: ['tags'],
        tags: ['tag-1'],
      });
      await expect(ctx.get(TagRepository).getByValue(user.id, 'tag-1')).resolves.toEqual(
        expect.objectContaining({ id: tag.id }),
      );
      await expect(ctx.get(TagRepository).getAssetIds(tag.id, [asset.id])).resolves.toContain(asset.id);
    });
  });
  describe('deleteEmptyTags', () => {
    // handleTagCleanup removes tags with no attached assets; hierarchical tags
    // are covered below (parent kept while any descendant usage exists is
    // asserted per-case, not assumed here).
    it('single tag exists, not connected to any assets, and is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const tagRepo = ctx.get(TagRepository);
      const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });

      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toBeUndefined();
    });

    it('single tag exists, connected to one asset, and is not deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });

      await ctx.newTagAsset({ tagIds: [tag.id], assetIds: [asset.id] });

      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
    });

    it('hierarchical tag exists, and the parent is connected to an asset, and the child is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      // 'parent/child' denotes a hierarchical tag path; upsertTags creates both levels.
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });

      await ctx.newTagAsset({ tagIds: [parentTag.id], assetIds: [asset.id] });

      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
    });

    it('hierarchical tag exists, and only the child is connected to an asset, and nothing is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });

      await ctx.newTagAsset({ tagIds: [childTag.id], assetIds: [asset.id] });

      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      // Parent survives because its child is still in use.
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
    });

    it('hierarchical tag exists, and neither parent nor child is connected to an asset, and both are deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });

      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toBeUndefined();
    });
  });
});
|
||||
209
server/test/medium/specs/services/timeline.service.spec.ts
Normal file
209
server/test/medium/specs/services/timeline.service.spec.ts
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
import { BadRequestException } from '@nestjs/common';
|
||||
import { Kysely } from 'kysely';
|
||||
import { AssetVisibility } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { TimelineService } from 'src/services/timeline.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(TimelineService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AssetRepository, AccessRepository, PartnerRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Acquire the shared database connection once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(TimelineService.name, () => {
  describe('getTimeBuckets', () => {
    it('should get time buckets by month', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      // One asset in Jan 1970, three in Feb 1970 -> buckets are grouped by month.
      const dates = [new Date('1970-01-01'), new Date('1970-02-10'), new Date('1970-02-11'), new Date('1970-02-11')];
      for (const localDateTime of dates) {
        const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime });
        await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      }

      const response = sut.getTimeBuckets(auth, {});
      // Buckets are returned newest-first.
      await expect(response).resolves.toEqual([
        { count: 3, timeBucket: '1970-02-01' },
        { count: 1, timeBucket: '1970-01-01' },
      ]);
    });

    it('should return error if time bucket is requested with partners asset and archived', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const response1 = sut.getTimeBuckets(auth, { withPartners: true, visibility: AssetVisibility.Archive });
      await expect(response1).rejects.toBeInstanceOf(BadRequestException);
      await expect(response1).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );

      // NOTE(review): withPartners without any visibility also rejects here —
      // presumably the service requires an explicit non-archived visibility;
      // confirm against TimelineService.getTimeBuckets.
      const response2 = sut.getTimeBuckets(auth, { withPartners: true });
      await expect(response2).rejects.toBeInstanceOf(BadRequestException);
      await expect(response2).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
    });

    it('should return error if time bucket is requested with partners asset and favorite', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      // Both isFavorite values reject when combined with withPartners.
      const response1 = sut.getTimeBuckets(auth, { withPartners: true, isFavorite: false });
      await expect(response1).rejects.toBeInstanceOf(BadRequestException);
      await expect(response1).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );

      const response2 = sut.getTimeBuckets(auth, { withPartners: true, isFavorite: true });
      await expect(response2).rejects.toBeInstanceOf(BadRequestException);
      await expect(response2).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
    });

    it('should return error if time bucket is requested with partners asset and trash', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const response = sut.getTimeBuckets(auth, { withPartners: true, isTrashed: true });
      await expect(response).rejects.toBeInstanceOf(BadRequestException);
      await expect(response).rejects.toThrow(
        'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
      );
    });

    it('should not allow access for unrelated shared links', async () => {
      const { sut } = setup();
      // A shared-link auth without timeline access is denied.
      const auth = factory.auth({ sharedLink: {} });
      const response = sut.getTimeBuckets(auth, {});
      await expect(response).rejects.toBeInstanceOf(BadRequestException);
      await expect(response).rejects.toThrow('Not found or no timeline.read access');
    });
  });

  describe('getTimeBucket', () => {
    // NOTE(review): this test is byte-identical to 'should return time bucket
    // in trash' below — despite its name it creates a deleted asset and
    // queries with isTrashed. It likely should exercise a non-trashed bucket;
    // flagged for follow-up rather than changed here.
    it('should return time bucket', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({
        ownerId: user.id,
        localDateTime: new Date('1970-02-12'),
        deletedAt: new Date(),
      });
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      const auth = factory.auth({ user: { id: user.id } });
      const rawResponse = await sut.getTimeBucket(auth, { timeBucket: '1970-02-01', isTrashed: true });
      const response = JSON.parse(rawResponse);
      expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
    });

    it('should handle a bucket without any assets', async () => {
      const { sut } = setup();
      const rawResponse = await sut.getTimeBucket(factory.auth(), { timeBucket: '1970-02-01' });
      const response = JSON.parse(rawResponse);
      // The bucket payload is column-oriented: one array per field, all empty here.
      expect(response).toEqual({
        city: [],
        country: [],
        duration: [],
        id: [],
        visibility: [],
        isFavorite: [],
        isImage: [],
        isTrashed: [],
        livePhotoVideoId: [],
        fileCreatedAt: [],
        localOffsetHours: [],
        ownerId: [],
        projectionType: [],
        ratio: [],
        status: [],
        thumbhash: [],
      });
    });

    it('should handle 5 digit years', async () => {
      const { sut } = setup();
      const rawResponse = await sut.getTimeBucket(factory.auth(), { timeBucket: '012345-01-01' });
      const response = JSON.parse(rawResponse);
      expect(response).toEqual(expect.objectContaining({ id: [] }));
    });

    it('should return time bucket in trash', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({
        ownerId: user.id,
        localDateTime: new Date('1970-02-12'),
        deletedAt: new Date(),
      });
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
      const auth = factory.auth({ user: { id: user.id } });
      const rawResponse = await sut.getTimeBucket(auth, { timeBucket: '1970-02-01', isTrashed: true });
      const response = JSON.parse(rawResponse);
      expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
    });

    it('should return false for favorite status unless asset owner', async () => {
      const { sut, ctx } = setup();
      // Two users, each owning one favorited asset in the same month bucket.
      const [{ asset: asset1 }, { asset: asset2 }] = await Promise.all([
        ctx.newUser().then(async ({ user }) => {
          const result = await ctx.newAsset({
            ownerId: user.id,
            fileCreatedAt: new Date('1970-02-12'),
            localDateTime: new Date('1970-02-12'),
            isFavorite: true,
          });
          await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
          return result;
        }),
        ctx.newUser().then(async ({ user }) => {
          const result = await ctx.newAsset({
            ownerId: user.id,
            fileCreatedAt: new Date('1970-02-13'),
            localDateTime: new Date('1970-02-13'),
            isFavorite: true,
          });
          await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
          return result;
        }),
      ]);

      // Make the two users partners in both directions so each sees the other's assets.
      await Promise.all([
        ctx.newPartner({ sharedById: asset1.ownerId, sharedWithId: asset2.ownerId }),
        ctx.newPartner({ sharedById: asset2.ownerId, sharedWithId: asset1.ownerId }),
      ]);

      // Each viewer sees isFavorite=true only for their own asset.
      const auth1 = factory.auth({ user: { id: asset1.ownerId } });
      const rawResponse1 = await sut.getTimeBucket(auth1, {
        timeBucket: '1970-02-01',
        withPartners: true,
        visibility: AssetVisibility.Timeline,
      });
      const response1 = JSON.parse(rawResponse1);
      expect(response1).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [false, true] }));

      const auth2 = factory.auth({ user: { id: asset2.ownerId } });
      const rawResponse2 = await sut.getTimeBucket(auth2, {
        timeBucket: '1970-02-01',
        withPartners: true,
        visibility: AssetVisibility.Timeline,
      });
      const response2 = JSON.parse(rawResponse2);
      expect(response2).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [true, false] }));
    });
  });
});
|
||||
181
server/test/medium/specs/services/user.service.spec.ts
Normal file
181
server/test/medium/specs/services/user.service.spec.ts
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { ImmichEnvironment, JobName, JobStatus } from 'src/enum';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { CryptoRepository } from 'src/repositories/crypto.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { UserService } from 'src/services/user.service';
|
||||
import { mediumFactory, newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
process.env.IMMICH_ENV = ImmichEnvironment.Testing;
|
||||
|
||||
return newMediumService(UserService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [CryptoRepository, ConfigRepository, SystemMetadataRepository, UserRepository],
|
||||
mock: [LoggingRepository, JobRepository, EventRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Connect once per suite, then seed an admin account up front — presumably
// some user flows expect an admin to already exist (TODO confirm).
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
  const { ctx } = setup();
  await ctx.newUser({ isAdmin: true, email: 'admin@immich.cloud' });
});
|
||||
|
||||
describe(UserService.name, () => {
  describe('create', () => {
    it('should create a user', async () => {
      const { sut, ctx } = setup();
      // createUser emits an event; resolve the mocked emit so the call succeeds.
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const user = mediumFactory.userInsert();
      await expect(sut.createUser({ name: user.name, email: user.email })).resolves.toEqual(
        expect.objectContaining({ name: user.name, email: user.email }),
      );
    });

    it('should reject user with duplicate email', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const user = mediumFactory.userInsert();
      await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
      await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
    });

    it('should not return password', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = mediumFactory.userInsert({ password: 'password' });
      const user = await sut.createUser({ email: dto.email, password: 'password' });
      // The response DTO must never expose the password hash.
      expect((user as any).password).toBeUndefined();
    });
  });

  describe('search', () => {
    it('should get users', async () => {
      const { sut, ctx } = setup();
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      const auth = factory.auth({ user: user1 });

      await expect(sut.search(auth)).resolves.toEqual(
        expect.arrayContaining([
          expect.objectContaining({ email: user1.email }),
          expect.objectContaining({ email: user2.email }),
        ]),
      );
    });
  });

  describe('get', () => {
    it('should get a user', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();

      await expect(sut.get(user.id)).resolves.toEqual(
        expect.objectContaining({
          id: user.id,
          name: user.name,
          email: user.email,
        }),
      );
    });

    it('should not return password', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const result = await sut.get(user.id);

      expect((result as any).password).toBeUndefined();
    });
  });

  describe('updateMe', () => {
    it('should update a user', async () => {
      const { sut, ctx } = setup();
      const { user, result: before } = await ctx.newUser();
      const auth = factory.auth({ user: { id: user.id } });
      const after = await sut.updateMe(auth, { name: `${before.name} Updated` });

      // Updating must bump the updatedAt timestamp.
      expect(before.updatedAt).toBeDefined();
      expect(after.updatedAt).toBeDefined();
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });

  describe('setLicense', () => {
    it('should set a license', async () => {
      // Fixed test fixture license/activation key pair.
      const license = {
        licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
        activationKey:
          'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
      };
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user: { id: user.id } });
      // No license yet -> getLicense rejects.
      await expect(sut.getLicense(auth)).rejects.toThrowError();
      const after = await sut.setLicense(auth, license);
      expect(after.licenseKey).toEqual(license.licenseKey);
      expect(after.activationKey).toEqual(license.activationKey);
      const getResponse = await sut.getLicense(auth);
      expect(getResponse).toEqual(after);
    });
  });

  describe.sequential('handleUserDeleteCheck', () => {
    beforeEach(async () => {
      const { sut } = setup();
      // These tests specifically have to be sequential otherwise we hit race conditions with config changes applying in incorrect tests
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 7;
      await sut.updateConfig(config);
    });

    it('should work when there are no deleted users', async () => {
      const { sut, ctx } = setup();
      const jobMock = ctx.getMock(JobRepository);
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });

    // The next three tests use a fresh database (setup(await getKyselyDB()))
    // so pre-existing users from other tests don't affect the queued jobs.
    it('should work when there is a user to delete', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      const { user } = await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([{ name: JobName.UserDelete, data: { id: user.id } }]);
    });

    it('should skip a recently deleted user', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // 5 days < the 7-day delay configured in beforeEach.
      await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });

    it('should respect a custom user delete delay', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // Deleted 25 days ago, but the delay is raised to 30 -> not yet eligible.
      await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 25 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 30;
      await sut.updateConfig(config);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });
  });
});
|
||||
70  server/test/medium/specs/services/version.service.spec.ts  Normal file
|
|
@@ -0,0 +1,70 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { serverVersion } from 'src/constants';
|
||||
import { JobName } from 'src/enum';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { VersionService } from 'src/services/version.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(VersionService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [DatabaseRepository, VersionHistoryRepository],
|
||||
mock: [LoggingRepository, JobRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(VersionService.name, () => {
|
||||
describe('onBootstrap', () => {
|
||||
it('record the current version on startup', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const versionHistoryRepo = ctx.get(VersionHistoryRepository);
|
||||
|
||||
const itemsBefore = await versionHistoryRepo.getAll();
|
||||
expect(itemsBefore).toHaveLength(0);
|
||||
|
||||
await sut.onBootstrap();
|
||||
|
||||
const itemsAfter = await versionHistoryRepo.getAll();
|
||||
expect(itemsAfter).toHaveLength(1);
|
||||
expect(itemsAfter[0]).toEqual({
|
||||
createdAt: expect.any(Date),
|
||||
id: expect.any(String),
|
||||
version: serverVersion.toString(),
|
||||
});
|
||||
});
|
||||
|
||||
it('should queue memory creation when upgrading from 1.128.0', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const jobMock = ctx.getMock(JobRepository);
|
||||
const versionHistoryRepo = ctx.get(VersionHistoryRepository);
|
||||
jobMock.queue.mockResolvedValue(void 0);
|
||||
|
||||
await versionHistoryRepo.create({ version: 'v1.128.0' });
|
||||
await sut.onBootstrap();
|
||||
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.MemoryGenerate });
|
||||
});
|
||||
|
||||
it('should not queue memory creation when upgrading from 1.129.0', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const jobMock = ctx.getMock(JobRepository);
|
||||
const versionHistoryRepo = ctx.get(VersionHistoryRepository);
|
||||
|
||||
await versionHistoryRepo.create({ version: 'v1.129.0' });
|
||||
await sut.onBootstrap();
|
||||
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
776  server/test/medium/specs/services/workflow.service.spec.ts  Normal file
|
|
@@ -0,0 +1,776 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { PluginContext, PluginTriggerType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PluginRepository } from 'src/repositories/plugin.repository';
|
||||
import { WorkflowRepository } from 'src/repositories/workflow.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { WorkflowService } from 'src/services/workflow.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(WorkflowService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [WorkflowRepository, PluginRepository, AccessRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(WorkflowService.name, () => {
|
||||
let testPluginId: string;
|
||||
let testFilterId: string;
|
||||
let testActionId: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a test plugin with filters and actions once for all tests
|
||||
const pluginRepo = new PluginRepository(defaultDatabase);
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'test-core-plugin',
|
||||
title: 'Test Core Plugin',
|
||||
description: 'A test core plugin for workflow tests',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: {
|
||||
path: '/test/path.wasm',
|
||||
},
|
||||
filters: [
|
||||
{
|
||||
methodName: 'test-filter',
|
||||
title: 'Test Filter',
|
||||
description: 'A test filter',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
methodName: 'test-action',
|
||||
title: 'Test Action',
|
||||
description: 'A test action',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/plugins/test-core-plugin',
|
||||
);
|
||||
|
||||
testPluginId = result.plugin.id;
|
||||
testFilterId = result.filters[0].id;
|
||||
testActionId = result.actions[0].id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await defaultDatabase.deleteFrom('plugin').where('id', '=', testPluginId).execute();
|
||||
});
|
||||
|
||||
describe('create', () => {
|
||||
it('should create a workflow without filters or actions', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'A test workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
expect(workflow).toMatchObject({
|
||||
id: expect.any(String),
|
||||
ownerId: user.id,
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'A test workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a workflow with filters and actions', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow-with-relations',
|
||||
description: 'A test workflow with filters and actions',
|
||||
enabled: true,
|
||||
filters: [
|
||||
{
|
||||
pluginFilterId: testFilterId,
|
||||
filterConfig: { key: 'value' },
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
pluginActionId: testActionId,
|
||||
actionConfig: { action: 'test' },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
expect(workflow).toMatchObject({
|
||||
id: expect.any(String),
|
||||
ownerId: user.id,
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow-with-relations',
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
expect(workflow.filters).toHaveLength(1);
|
||||
expect(workflow.filters[0]).toMatchObject({
|
||||
id: expect.any(String),
|
||||
workflowId: workflow.id,
|
||||
pluginFilterId: testFilterId,
|
||||
filterConfig: { key: 'value' },
|
||||
order: 0,
|
||||
});
|
||||
|
||||
expect(workflow.actions).toHaveLength(1);
|
||||
expect(workflow.actions[0]).toMatchObject({
|
||||
id: expect.any(String),
|
||||
workflowId: workflow.id,
|
||||
pluginActionId: testActionId,
|
||||
actionConfig: { action: 'test' },
|
||||
order: 0,
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw error when creating workflow with invalid filter', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(
|
||||
sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'invalid-workflow',
|
||||
description: 'A workflow with invalid filter',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: factory.uuid(), filterConfig: { key: 'value' } }],
|
||||
actions: [],
|
||||
}),
|
||||
).rejects.toThrow('Invalid filter ID');
|
||||
});
|
||||
|
||||
it('should throw error when creating workflow with invalid action', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(
|
||||
sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'invalid-workflow',
|
||||
description: 'A workflow with invalid action',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [{ pluginActionId: factory.uuid(), actionConfig: { action: 'test' } }],
|
||||
}),
|
||||
).rejects.toThrow('Invalid action ID');
|
||||
});
|
||||
|
||||
it('should throw error when filter does not support trigger context', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
// Create a plugin with a filter that only supports Album context
|
||||
const pluginRepo = new PluginRepository(defaultDatabase);
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'album-only-plugin',
|
||||
title: 'Album Only Plugin',
|
||||
description: 'Plugin with album-only filter',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/test/album-plugin.wasm' },
|
||||
filters: [
|
||||
{
|
||||
methodName: 'album-filter',
|
||||
title: 'Album Filter',
|
||||
description: 'A filter that only works with albums',
|
||||
supportedContexts: [PluginContext.Album],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/plugins/test-core-plugin',
|
||||
);
|
||||
|
||||
await expect(
|
||||
sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'invalid-context-workflow',
|
||||
description: 'A workflow with context mismatch',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: result.filters[0].id }],
|
||||
actions: [],
|
||||
}),
|
||||
).rejects.toThrow('does not support asset context');
|
||||
});
|
||||
|
||||
it('should throw error when action does not support trigger context', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
// Create a plugin with an action that only supports Person context
|
||||
const pluginRepo = new PluginRepository(defaultDatabase);
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'person-only-plugin',
|
||||
title: 'Person Only Plugin',
|
||||
description: 'Plugin with person-only action',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: { path: '/test/person-plugin.wasm' },
|
||||
actions: [
|
||||
{
|
||||
methodName: 'person-action',
|
||||
title: 'Person Action',
|
||||
description: 'An action that only works with persons',
|
||||
supportedContexts: [PluginContext.Person],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/plugins/test-core-plugin',
|
||||
);
|
||||
|
||||
await expect(
|
||||
sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'invalid-context-workflow',
|
||||
description: 'A workflow with context mismatch',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [{ pluginActionId: result.actions[0].id }],
|
||||
}),
|
||||
).rejects.toThrow('does not support asset context');
|
||||
});
|
||||
|
||||
it('should create workflow with multiple filters and actions in correct order', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'multi-step-workflow',
|
||||
description: 'A workflow with multiple filters and actions',
|
||||
enabled: true,
|
||||
filters: [
|
||||
{ pluginFilterId: testFilterId, filterConfig: { step: 1 } },
|
||||
{ pluginFilterId: testFilterId, filterConfig: { step: 2 } },
|
||||
],
|
||||
actions: [
|
||||
{ pluginActionId: testActionId, actionConfig: { step: 1 } },
|
||||
{ pluginActionId: testActionId, actionConfig: { step: 2 } },
|
||||
{ pluginActionId: testActionId, actionConfig: { step: 3 } },
|
||||
],
|
||||
});
|
||||
|
||||
expect(workflow.filters).toHaveLength(2);
|
||||
expect(workflow.filters[0].order).toBe(0);
|
||||
expect(workflow.filters[0].filterConfig).toEqual({ step: 1 });
|
||||
expect(workflow.filters[1].order).toBe(1);
|
||||
expect(workflow.filters[1].filterConfig).toEqual({ step: 2 });
|
||||
|
||||
expect(workflow.actions).toHaveLength(3);
|
||||
expect(workflow.actions[0].order).toBe(0);
|
||||
expect(workflow.actions[1].order).toBe(1);
|
||||
expect(workflow.actions[2].order).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getAll', () => {
|
||||
it('should return all workflows for a user', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow1 = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'workflow-1',
|
||||
description: 'First workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
const workflow2 = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'workflow-2',
|
||||
description: 'Second workflow',
|
||||
enabled: false,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
const workflows = await sut.getAll(auth);
|
||||
|
||||
expect(workflows).toHaveLength(2);
|
||||
expect(workflows).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({ id: workflow1.id, name: 'workflow-1' }),
|
||||
expect.objectContaining({ id: workflow2.id, name: 'workflow-2' }),
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should return empty array when user has no workflows', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflows = await sut.getAll(auth);
|
||||
|
||||
expect(workflows).toEqual([]);
|
||||
});
|
||||
|
||||
it('should not return workflows from other users', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user: user1 } = await ctx.newUser();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const auth1 = factory.auth({ user: user1 });
|
||||
const auth2 = factory.auth({ user: user2 });
|
||||
|
||||
await sut.create(auth1, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'user1-workflow',
|
||||
description: 'User 1 workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
const user2Workflows = await sut.getAll(auth2);
|
||||
|
||||
expect(user2Workflows).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('get', () => {
|
||||
it('should return a specific workflow by id', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'A test workflow',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
|
||||
actions: [{ pluginActionId: testActionId, actionConfig: { action: 'test' } }],
|
||||
});
|
||||
|
||||
const workflow = await sut.get(auth, created.id);
|
||||
|
||||
expect(workflow).toMatchObject({
|
||||
id: created.id,
|
||||
name: 'test-workflow',
|
||||
description: 'A test workflow',
|
||||
enabled: true,
|
||||
});
|
||||
expect(workflow.filters).toHaveLength(1);
|
||||
expect(workflow.actions).toHaveLength(1);
|
||||
});
|
||||
|
||||
it('should throw error when workflow does not exist', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(sut.get(auth, '66da82df-e424-4bf4-b6f3-5d8e71620dae')).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error when user does not have access to workflow', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user: user1 } = await ctx.newUser();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const auth1 = factory.auth({ user: user1 });
|
||||
const auth2 = factory.auth({ user: user2 });
|
||||
|
||||
const workflow = await sut.create(auth1, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'private-workflow',
|
||||
description: 'Private workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await expect(sut.get(auth2, workflow.id)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
it('should update workflow basic fields', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'original-workflow',
|
||||
description: 'Original description',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
const updated = await sut.update(auth, created.id, {
|
||||
name: 'updated-workflow',
|
||||
description: 'Updated description',
|
||||
enabled: false,
|
||||
});
|
||||
|
||||
expect(updated).toMatchObject({
|
||||
id: created.id,
|
||||
name: 'updated-workflow',
|
||||
description: 'Updated description',
|
||||
enabled: false,
|
||||
});
|
||||
});
|
||||
|
||||
it('should update workflow filters', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: { old: 'config' } }],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
const updated = await sut.update(auth, created.id, {
|
||||
filters: [
|
||||
{ pluginFilterId: testFilterId, filterConfig: { new: 'config' } },
|
||||
{ pluginFilterId: testFilterId, filterConfig: { second: 'filter' } },
|
||||
],
|
||||
});
|
||||
|
||||
expect(updated.filters).toHaveLength(2);
|
||||
expect(updated.filters[0].filterConfig).toEqual({ new: 'config' });
|
||||
expect(updated.filters[1].filterConfig).toEqual({ second: 'filter' });
|
||||
});
|
||||
|
||||
it('should update workflow actions', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [{ pluginActionId: testActionId, actionConfig: { old: 'config' } }],
|
||||
});
|
||||
|
||||
const updated = await sut.update(auth, created.id, {
|
||||
actions: [
|
||||
{ pluginActionId: testActionId, actionConfig: { new: 'config' } },
|
||||
{ pluginActionId: testActionId, actionConfig: { second: 'action' } },
|
||||
],
|
||||
});
|
||||
|
||||
expect(updated.actions).toHaveLength(2);
|
||||
expect(updated.actions[0].actionConfig).toEqual({ new: 'config' });
|
||||
expect(updated.actions[1].actionConfig).toEqual({ second: 'action' });
|
||||
});
|
||||
|
||||
it('should clear filters when updated with empty array', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
const updated = await sut.update(auth, created.id, {
|
||||
filters: [],
|
||||
});
|
||||
|
||||
expect(updated.filters).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should throw error when no fields to update', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await expect(sut.update(auth, created.id, {})).rejects.toThrow('No fields to update');
|
||||
});
|
||||
|
||||
it('should throw error when updating non-existent workflow', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(sut.update(auth, factory.uuid(), { name: 'updated-name' })).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error when user does not have access to update workflow', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user: user1 } = await ctx.newUser();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const auth1 = factory.auth({ user: user1 });
|
||||
const auth2 = factory.auth({ user: user2 });
|
||||
|
||||
const workflow = await sut.create(auth1, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'private-workflow',
|
||||
description: 'Private',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await expect(
|
||||
sut.update(auth2, workflow.id, {
|
||||
name: 'hacked-workflow',
|
||||
}),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error when updating with invalid filter', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await expect(
|
||||
sut.update(auth, created.id, {
|
||||
filters: [{ pluginFilterId: factory.uuid(), filterConfig: {} }],
|
||||
}),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error when updating with invalid action', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await expect(
|
||||
sut.update(auth, created.id, { actions: [{ pluginActionId: factory.uuid(), actionConfig: {} }] }),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should update trigger type', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.PersonRecognized,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await sut.update(auth, created.id, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
});
|
||||
|
||||
const fetched = await sut.get(auth, created.id);
|
||||
expect(fetched.triggerType).toBe(PluginTriggerType.AssetCreate);
|
||||
});
|
||||
|
||||
it('should add filters', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await sut.update(auth, created.id, {
|
||||
filters: [
|
||||
{ pluginFilterId: testFilterId, filterConfig: { first: true } },
|
||||
{ pluginFilterId: testFilterId, filterConfig: { second: true } },
|
||||
],
|
||||
});
|
||||
|
||||
const fetched = await sut.get(auth, created.id);
|
||||
expect(fetched.filters).toHaveLength(2);
|
||||
expect(fetched.filters[0].filterConfig).toEqual({ first: true });
|
||||
expect(fetched.filters[1].filterConfig).toEqual({ second: true });
|
||||
});
|
||||
|
||||
it('should replace existing filters', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: { original: true } }],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await sut.update(auth, created.id, {
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: { replaced: true } }],
|
||||
});
|
||||
|
||||
const fetched = await sut.get(auth, created.id);
|
||||
expect(fetched.filters).toHaveLength(1);
|
||||
expect(fetched.filters[0].filterConfig).toEqual({ replaced: true });
|
||||
});
|
||||
|
||||
it('should remove existing filters', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const created = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: { toRemove: true } }],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await sut.update(auth, created.id, {
|
||||
filters: [],
|
||||
});
|
||||
|
||||
const fetched = await sut.get(auth, created.id);
|
||||
expect(fetched.filters).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete a workflow', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await sut.delete(auth, workflow.id);
|
||||
|
||||
await expect(sut.get(auth, workflow.id)).rejects.toThrow('Not found or no workflow.read access');
|
||||
});
|
||||
|
||||
it('should delete workflow with filters and actions', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'Test',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: testFilterId, filterConfig: {} }],
|
||||
actions: [{ pluginActionId: testActionId, actionConfig: {} }],
|
||||
});
|
||||
|
||||
await sut.delete(auth, workflow.id);
|
||||
|
||||
await expect(sut.get(auth, workflow.id)).rejects.toThrow('Not found or no workflow.read access');
|
||||
});
|
||||
|
||||
it('should throw error when deleting non-existent workflow', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(sut.delete(auth, factory.uuid())).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('should throw error when user does not have access to delete workflow', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user: user1 } = await ctx.newUser();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const auth1 = factory.auth({ user: user1 });
|
||||
const auth2 = factory.auth({ user: user2 });
|
||||
|
||||
const workflow = await sut.create(auth1, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'private-workflow',
|
||||
description: 'Private',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
await expect(sut.delete(auth2, workflow.id)).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue