Source Code added
This commit is contained in:
parent
800376eafd
commit
9efa9bc6dd
3912 changed files with 754770 additions and 2 deletions
52
server/test/medium/globalSetup.ts
Normal file
52
server/test/medium/globalSetup.ts
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { getKyselyConfig } from 'src/utils/database';
|
||||
import { GenericContainer, Wait } from 'testcontainers';
|
||||
|
||||
const globalSetup = async () => {
|
||||
const templateName = 'mich';
|
||||
const postgresContainer = await new GenericContainer('ghcr.io/immich-app/postgres:14-vectorchord0.4.3')
|
||||
.withExposedPorts(5432)
|
||||
.withEnvironment({
|
||||
POSTGRES_PASSWORD: 'postgres',
|
||||
POSTGRES_USER: 'postgres',
|
||||
POSTGRES_DB: templateName,
|
||||
})
|
||||
.withCommand([
|
||||
'postgres',
|
||||
'-c',
|
||||
'shared_preload_libraries=vchord.so',
|
||||
'-c',
|
||||
'max_wal_size=2GB',
|
||||
'-c',
|
||||
'shared_buffers=512MB',
|
||||
'-c',
|
||||
'fsync=off',
|
||||
'-c',
|
||||
'full_page_writes=off',
|
||||
'-c',
|
||||
'synchronous_commit=off',
|
||||
'-c',
|
||||
'config_file=/var/lib/postgresql/data/postgresql.conf',
|
||||
])
|
||||
.withWaitStrategy(Wait.forAll([Wait.forLogMessage('database system is ready to accept connections', 2)]))
|
||||
.start();
|
||||
|
||||
const postgresPort = postgresContainer.getMappedPort(5432);
|
||||
const postgresUrl = `postgres://postgres:postgres@localhost:${postgresPort}/${templateName}`;
|
||||
|
||||
process.env.IMMICH_TEST_POSTGRES_URL = postgresUrl;
|
||||
|
||||
const db = new Kysely<DB>(getKyselyConfig({ connectionType: 'url', url: postgresUrl }));
|
||||
|
||||
const configRepository = new ConfigRepository();
|
||||
const logger = LoggingRepository.create();
|
||||
await new DatabaseRepository(db, logger, configRepository).runMigrations();
|
||||
|
||||
await db.destroy();
|
||||
};
|
||||
|
||||
export default globalSetup;
|
||||
114
server/test/medium/responses.ts
Normal file
114
server/test/medium/responses.ts
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
import { expect } from 'vitest';
|
||||
|
||||
export const errorDto = {
|
||||
unauthorized: {
|
||||
error: 'Unauthorized',
|
||||
statusCode: 401,
|
||||
message: 'Authentication required',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
forbidden: {
|
||||
error: 'Forbidden',
|
||||
statusCode: 403,
|
||||
message: expect.any(String),
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
missingPermission: (permission: string) => ({
|
||||
error: 'Forbidden',
|
||||
statusCode: 403,
|
||||
message: `Missing required permission: ${permission}`,
|
||||
correlationId: expect.any(String),
|
||||
}),
|
||||
wrongPassword: {
|
||||
error: 'Bad Request',
|
||||
statusCode: 400,
|
||||
message: 'Wrong password',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
invalidToken: {
|
||||
error: 'Unauthorized',
|
||||
statusCode: 401,
|
||||
message: 'Invalid user token',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
invalidShareKey: {
|
||||
error: 'Unauthorized',
|
||||
statusCode: 401,
|
||||
message: 'Invalid share key',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
invalidSharePassword: {
|
||||
error: 'Unauthorized',
|
||||
statusCode: 401,
|
||||
message: 'Invalid password',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
badRequest: (message: any = null) => ({
|
||||
error: 'Bad Request',
|
||||
statusCode: 400,
|
||||
message: message ?? expect.anything(),
|
||||
}),
|
||||
noPermission: {
|
||||
error: 'Bad Request',
|
||||
statusCode: 400,
|
||||
message: expect.stringContaining('Not found or no'),
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
incorrectLogin: {
|
||||
error: 'Unauthorized',
|
||||
statusCode: 401,
|
||||
message: 'Incorrect email or password',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
alreadyHasAdmin: {
|
||||
error: 'Bad Request',
|
||||
statusCode: 400,
|
||||
message: 'The server already has an admin',
|
||||
correlationId: expect.any(String),
|
||||
},
|
||||
};
|
||||
|
||||
export const signupResponseDto = {
|
||||
admin: {
|
||||
avatarColor: expect.any(String),
|
||||
id: expect.any(String),
|
||||
name: 'Immich Admin',
|
||||
email: 'admin@immich.cloud',
|
||||
storageLabel: 'admin',
|
||||
profileImagePath: '',
|
||||
// why? lol
|
||||
shouldChangePassword: true,
|
||||
isAdmin: true,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
deletedAt: null,
|
||||
oauthId: '',
|
||||
quotaUsageInBytes: 0,
|
||||
quotaSizeInBytes: null,
|
||||
status: 'active',
|
||||
license: null,
|
||||
profileChangedAt: expect.any(String),
|
||||
},
|
||||
};
|
||||
|
||||
export const loginResponseDto = {
|
||||
admin: {
|
||||
accessToken: expect.any(String),
|
||||
name: 'Immich Admin',
|
||||
isAdmin: true,
|
||||
profileImagePath: '',
|
||||
shouldChangePassword: true,
|
||||
userEmail: 'admin@immich.cloud',
|
||||
userId: expect.any(String),
|
||||
},
|
||||
};
|
||||
export const deviceDto = {
|
||||
current: {
|
||||
id: expect.any(String),
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
current: true,
|
||||
deviceOS: '',
|
||||
deviceType: '',
|
||||
},
|
||||
};
|
||||
65
server/test/medium/specs/exif/exif-date-time.spec.ts
Normal file
65
server/test/medium/specs/exif/exif-date-time.spec.ts
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { resolve } from 'node:path';
|
||||
import { DB } from 'src/schema';
|
||||
import { ExifTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let database: Kysely<DB>;
|
||||
|
||||
const setup = async (testAssetPath: string) => {
|
||||
const ctx = new ExifTestContext(database);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
|
||||
|
||||
return { ctx, sut: ctx.sut, asset };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
database = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe('exif date time', () => {
|
||||
it('should prioritize DateTimeOriginal', async () => {
|
||||
const { ctx, sut, asset } = await setup('metadata/dates/date-priority-test.jpg');
|
||||
|
||||
await sut.handleMetadataExtraction({ id: asset.id });
|
||||
|
||||
await expect(ctx.getDates(asset.id)).resolves.toEqual({
|
||||
timeZone: null,
|
||||
dateTimeOriginal: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
|
||||
localDateTime: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
|
||||
fileCreatedAt: DateTime.fromISO('2023-02-02T02:00:00.000Z').toJSDate(),
|
||||
});
|
||||
});
|
||||
|
||||
it('should extract GPSDateTime with GPS coordinates ', async () => {
|
||||
const { ctx, sut, asset } = await setup('metadata/dates/gps-datetime.jpg');
|
||||
|
||||
await sut.handleMetadataExtraction({ id: asset.id });
|
||||
|
||||
await expect(ctx.getDates(asset.id)).resolves.toEqual({
|
||||
timeZone: 'America/Los_Angeles',
|
||||
dateTimeOriginal: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
|
||||
localDateTime: DateTime.fromISO('2023-11-15T04:30:00.000Z').toJSDate(),
|
||||
fileCreatedAt: DateTime.fromISO('2023-11-15T12:30:00.000Z').toJSDate(),
|
||||
});
|
||||
});
|
||||
|
||||
it('should ignore the TimeCreated tag', async () => {
|
||||
const { ctx, sut, asset } = await setup('metadata/dates/time-created.jpg');
|
||||
|
||||
await sut.handleMetadataExtraction({ id: asset.id });
|
||||
|
||||
const stats = ctx.getMockStats();
|
||||
|
||||
await expect(ctx.getDates(asset.id)).resolves.toEqual({
|
||||
timeZone: null,
|
||||
dateTimeOriginal: stats.mtime,
|
||||
localDateTime: stats.mtime,
|
||||
fileCreatedAt: stats.mtime,
|
||||
});
|
||||
});
|
||||
});
|
||||
31
server/test/medium/specs/exif/exif-gps.spec.ts
Normal file
31
server/test/medium/specs/exif/exif-gps.spec.ts
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { resolve } from 'node:path';
|
||||
import { DB } from 'src/schema';
|
||||
import { ExifTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let database: Kysely<DB>;
|
||||
|
||||
const setup = async (testAssetPath: string) => {
|
||||
const ctx = new ExifTestContext(database);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
|
||||
|
||||
return { ctx, sut: ctx.sut, asset };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
database = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe('exif gps', () => {
|
||||
it('should handle empty strings', async () => {
|
||||
const { ctx, sut, asset } = await setup('metadata/gps-position/empty_gps.jpg');
|
||||
|
||||
await sut.handleMetadataExtraction({ id: asset.id });
|
||||
|
||||
await expect(ctx.getGps(asset.id)).resolves.toEqual({ latitude: null, longitude: null });
|
||||
});
|
||||
});
|
||||
34
server/test/medium/specs/exif/exif-tags.spec.ts
Normal file
34
server/test/medium/specs/exif/exif-tags.spec.ts
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { resolve } from 'node:path';
|
||||
import { DB } from 'src/schema';
|
||||
import { ExifTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let database: Kysely<DB>;
|
||||
|
||||
const setup = async (testAssetPath: string) => {
|
||||
const ctx = new ExifTestContext(database);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const originalPath = resolve(`../e2e/test-assets/${testAssetPath}`);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id, originalPath });
|
||||
|
||||
return { ctx, sut: ctx.sut, asset };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
database = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe('exif tags', () => {
|
||||
it('should detect and regular tags', async () => {
|
||||
const { ctx, sut, asset } = await setup('metadata/tags/picasa.jpg');
|
||||
|
||||
await sut.handleMetadataExtraction({ id: asset.id });
|
||||
|
||||
await expect(ctx.getTags(asset.id)).resolves.toEqual([
|
||||
expect.objectContaining({ assetId: asset.id, value: 'Frost', parentId: null }),
|
||||
expect.objectContaining({ assetId: asset.id, value: 'Yard', parentId: null }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,115 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetEditAction, MirrorAxis } from 'src/dtos/editing.dto';
|
||||
import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
const { ctx } = newMediumService(BaseService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
return { ctx, sut: ctx.get(AssetEditRepository) };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AssetEditRepository.name, () => {
|
||||
describe('replaceAll', () => {
|
||||
it('should set isEdited on insert', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: false });
|
||||
|
||||
await sut.replaceAll(asset.id, [
|
||||
{ action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
|
||||
]);
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: true });
|
||||
});
|
||||
|
||||
it('should set isEdited when inserting multiple edits', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: false });
|
||||
|
||||
await sut.replaceAll(asset.id, [
|
||||
{ action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
]);
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: true });
|
||||
});
|
||||
|
||||
it('should keep isEdited when removing some edits', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: false });
|
||||
|
||||
await sut.replaceAll(asset.id, [
|
||||
{ action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
]);
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: true });
|
||||
|
||||
await sut.replaceAll(asset.id, [
|
||||
{ action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
|
||||
]);
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: true });
|
||||
});
|
||||
|
||||
it('should set isEdited to false if all edits are deleted', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: false });
|
||||
|
||||
await sut.replaceAll(asset.id, [
|
||||
{ action: AssetEditAction.Crop, parameters: { height: 1, width: 1, x: 1, y: 1 } },
|
||||
{ action: AssetEditAction.Mirror, parameters: { axis: MirrorAxis.Horizontal } },
|
||||
{ action: AssetEditAction.Rotate, parameters: { angle: 90 } },
|
||||
]);
|
||||
|
||||
await sut.replaceAll(asset.id, []);
|
||||
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset').select('isEdited').where('id', '=', asset.id).executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ isEdited: false });
|
||||
});
|
||||
});
|
||||
});
|
||||
150
server/test/medium/specs/repositories/asset.repository.spec.ts
Normal file
150
server/test/medium/specs/repositories/asset.repository.spec.ts
Normal file
|
|
@ -0,0 +1,150 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
const { ctx } = newMediumService(BaseService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
return { ctx, sut: ctx.get(AssetRepository) };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AssetRepository.name, () => {
|
||||
describe('upsertExif', () => {
|
||||
it('should append to locked columns', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({
|
||||
assetId: asset.id,
|
||||
dateTimeOriginal: '2023-11-19T18:11:00',
|
||||
lockedProperties: ['dateTimeOriginal'],
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal'] });
|
||||
|
||||
await sut.upsertExif(
|
||||
{ assetId: asset.id, lockedProperties: ['description'] },
|
||||
{ lockedPropertiesBehavior: 'append' },
|
||||
);
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['description', 'dateTimeOriginal'] });
|
||||
});
|
||||
|
||||
it('should deduplicate locked columns', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({
|
||||
assetId: asset.id,
|
||||
dateTimeOriginal: '2023-11-19T18:11:00',
|
||||
lockedProperties: ['dateTimeOriginal', 'description'],
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
|
||||
|
||||
await sut.upsertExif(
|
||||
{ assetId: asset.id, lockedProperties: ['description'] },
|
||||
{ lockedPropertiesBehavior: 'append' },
|
||||
);
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['description', 'dateTimeOriginal'] });
|
||||
});
|
||||
});
|
||||
|
||||
describe('unlockProperties', () => {
|
||||
it('should unlock one property', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({
|
||||
assetId: asset.id,
|
||||
dateTimeOriginal: '2023-11-19T18:11:00',
|
||||
lockedProperties: ['dateTimeOriginal', 'description'],
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
|
||||
|
||||
await sut.unlockProperties(asset.id, ['dateTimeOriginal']);
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['description'] });
|
||||
});
|
||||
|
||||
it('should unlock all properties', async () => {
|
||||
const { ctx, sut } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({
|
||||
assetId: asset.id,
|
||||
dateTimeOriginal: '2023-11-19T18:11:00',
|
||||
lockedProperties: ['dateTimeOriginal', 'description'],
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: ['dateTimeOriginal', 'description'] });
|
||||
|
||||
await sut.unlockProperties(asset.id, ['description', 'dateTimeOriginal']);
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: null });
|
||||
});
|
||||
});
|
||||
});
|
||||
261
server/test/medium/specs/services/asset-media.service.spec.ts
Normal file
261
server/test/medium/specs/services/asset-media.service.spec.ts
Normal file
|
|
@ -0,0 +1,261 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetMediaStatus } from 'src/dtos/asset-media-response.dto';
|
||||
import { AssetMediaSize } from 'src/dtos/asset-media.dto';
|
||||
import { AssetFileType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AssetMediaService } from 'src/services/asset-media.service';
|
||||
import { AssetService } from 'src/services/asset.service';
|
||||
import { ImmichFileResponse } from 'src/utils/file';
|
||||
import { mediumFactory, newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AssetMediaService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AccessRepository, AssetRepository, UserRepository],
|
||||
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AssetService.name, () => {
|
||||
describe('uploadAsset', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const file = mediumFactory.uploadFile();
|
||||
|
||||
await expect(
|
||||
sut.uploadAsset(
|
||||
auth,
|
||||
{
|
||||
deviceId: 'some-id',
|
||||
deviceAssetId: 'some-id',
|
||||
fileModifiedAt: new Date(),
|
||||
fileCreatedAt: new Date(),
|
||||
assetData: Buffer.from('some data'),
|
||||
},
|
||||
file,
|
||||
),
|
||||
).resolves.toEqual({
|
||||
id: expect.any(String),
|
||||
status: AssetMediaStatus.CREATED,
|
||||
});
|
||||
|
||||
expect(ctx.getMock(EventRepository).emit).toHaveBeenCalledWith('AssetCreate', {
|
||||
asset: expect.objectContaining({ deviceAssetId: 'some-id' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with an empty metadata list', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
ctx.getMock(StorageRepository).utimes.mockResolvedValue();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const file = mediumFactory.uploadFile();
|
||||
|
||||
await expect(
|
||||
sut.uploadAsset(
|
||||
auth,
|
||||
{
|
||||
deviceId: 'some-id',
|
||||
deviceAssetId: 'some-id',
|
||||
fileModifiedAt: new Date(),
|
||||
fileCreatedAt: new Date(),
|
||||
assetData: Buffer.from('some data'),
|
||||
metadata: [],
|
||||
},
|
||||
file,
|
||||
),
|
||||
).resolves.toEqual({
|
||||
id: expect.any(String),
|
||||
status: AssetMediaStatus.CREATED,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('viewThumbnail', () => {
|
||||
it('should return original thumbnail by default when both exist', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/edited/preview.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return edited thumbnail when edited=true', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/edited/preview.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: true });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/edited/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return original thumbnail when edited=false', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/edited/preview.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: false });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return original thumbnail when only original exists and edited=false', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create only original thumbnail
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: false });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should return original thumbnail when only original exists and edited=true', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create only original thumbnail
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Preview,
|
||||
path: '/original/preview.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const result = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.PREVIEW, edited: true });
|
||||
|
||||
expect(result).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((result as ImmichFileResponse).path).toBe('/original/preview.jpg');
|
||||
});
|
||||
|
||||
it('should work with thumbnail size', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
// Create both original and edited thumbnails
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Thumbnail,
|
||||
path: '/original/thumbnail.jpg',
|
||||
isEdited: false,
|
||||
});
|
||||
await ctx.newAssetFile({
|
||||
assetId: asset.id,
|
||||
type: AssetFileType.Thumbnail,
|
||||
path: '/edited/thumbnail.jpg',
|
||||
isEdited: true,
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
// Test default (should get original)
|
||||
const resultDefault = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.THUMBNAIL });
|
||||
expect(resultDefault).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((resultDefault as ImmichFileResponse).path).toBe('/original/thumbnail.jpg');
|
||||
|
||||
// Test edited=true (should get edited)
|
||||
const resultEdited = await sut.viewThumbnail(auth, asset.id, { size: AssetMediaSize.THUMBNAIL, edited: true });
|
||||
expect(resultEdited).toBeInstanceOf(ImmichFileResponse);
|
||||
expect((resultEdited as ImmichFileResponse).path).toBe('/edited/thumbnail.jpg');
|
||||
});
|
||||
});
|
||||
});
|
||||
606
server/test/medium/specs/services/asset.service.spec.ts
Normal file
606
server/test/medium/specs/services/asset.service.spec.ts
Normal file
|
|
@ -0,0 +1,606 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetFileType, AssetMetadataKey, JobName, SharedLinkType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AlbumRepository } from 'src/repositories/album.repository';
|
||||
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
|
||||
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
|
||||
import { StackRepository } from 'src/repositories/stack.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AssetService } from 'src/services/asset.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AssetService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AssetRepository,
|
||||
AssetJobRepository,
|
||||
AlbumRepository,
|
||||
AccessRepository,
|
||||
SharedLinkAssetRepository,
|
||||
StackRepository,
|
||||
UserRepository,
|
||||
],
|
||||
mock: [EventRepository, LoggingRepository, JobRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AssetService.name, () => {
|
||||
describe('getStatistics', () => {
|
||||
it('should return stats as numbers, not strings', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, fileSizeInByte: 12_345 });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await expect(sut.getStatistics(auth, {})).resolves.toEqual({ images: 1, total: 1, videos: 0 });
|
||||
});
|
||||
});
|
||||
|
||||
describe('copy', () => {
|
||||
it('should copy albums', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const albumRepo = ctx.get(AlbumRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const { album } = await ctx.newAlbum({ ownerId: user.id });
|
||||
await ctx.newAlbumAsset({ albumId: album.id, assetId: oldAsset.id });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
await expect(albumRepo.getAssetIds(album.id, [oldAsset.id, newAsset.id])).resolves.toEqual(
|
||||
new Set([oldAsset.id, newAsset.id]),
|
||||
);
|
||||
});
|
||||
|
||||
it('should copy shared links', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const sharedLinkRepo = ctx.get(SharedLinkRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const { id: sharedLinkId } = await sharedLinkRepo.create({
|
||||
allowUpload: false,
|
||||
key: Buffer.from('123'),
|
||||
type: SharedLinkType.Individual,
|
||||
userId: user.id,
|
||||
assetIds: [oldAsset.id],
|
||||
});
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
await expect(sharedLinkRepo.get(user.id, sharedLinkId)).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
assets: [expect.objectContaining({ id: oldAsset.id }), expect.objectContaining({ id: newAsset.id })],
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should merge stacks', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
await ctx.newExif({ assetId: asset2.id, description: 'foo' });
|
||||
|
||||
await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
|
||||
|
||||
const {
|
||||
stack: { id: newStackId },
|
||||
} = await ctx.newStack({ ownerId: user.id }, [newAsset.id, asset2.id]);
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
await expect(stackRepo.getById(oldAsset.id)).resolves.toEqual(undefined);
|
||||
|
||||
const newStack = await stackRepo.getById(newStackId);
|
||||
expect(newStack).toEqual(
|
||||
expect.objectContaining({
|
||||
primaryAssetId: newAsset.id,
|
||||
assets: expect.arrayContaining([expect.objectContaining({ id: asset2.id })]),
|
||||
}),
|
||||
);
|
||||
expect(newStack!.assets.length).toEqual(4);
|
||||
});
|
||||
|
||||
it('should copy stack', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: asset1.id, description: 'bar' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const {
|
||||
stack: { id: stackId },
|
||||
} = await ctx.newStack({ ownerId: user.id }, [oldAsset.id, asset1.id]);
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
const stack = await stackRepo.getById(stackId);
|
||||
expect(stack).toEqual(
|
||||
expect.objectContaining({
|
||||
primaryAssetId: oldAsset.id,
|
||||
assets: expect.arrayContaining([expect.objectContaining({ id: newAsset.id })]),
|
||||
}),
|
||||
);
|
||||
expect(stack!.assets.length).toEqual(3);
|
||||
});
|
||||
|
||||
it('should copy favorite status', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id, isFavorite: true });
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
await expect(assetRepo.getById(newAsset.id)).resolves.toEqual(expect.objectContaining({ isFavorite: true }));
|
||||
});
|
||||
|
||||
it('should copy sidecar file', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const storageRepo = ctx.getMock(StorageRepository);
|
||||
const jobRepo = ctx.getMock(JobRepository);
|
||||
|
||||
storageRepo.copyFile.mockResolvedValue();
|
||||
jobRepo.queue.mockResolvedValue();
|
||||
|
||||
const { user } = await ctx.newUser();
|
||||
|
||||
const { asset: oldAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newAssetFile({
|
||||
assetId: oldAsset.id,
|
||||
path: '/path/to/my/sidecar.xmp',
|
||||
type: AssetFileType.Sidecar,
|
||||
});
|
||||
|
||||
const { asset: newAsset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.newExif({ assetId: oldAsset.id, description: 'foo' });
|
||||
await ctx.newExif({ assetId: newAsset.id, description: 'bar' });
|
||||
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
|
||||
await sut.copy(auth, { sourceId: oldAsset.id, targetId: newAsset.id });
|
||||
|
||||
expect(storageRepo.copyFile).toHaveBeenCalledWith('/path/to/my/sidecar.xmp', `${newAsset.originalPath}.xmp`);
|
||||
|
||||
expect(jobRepo.queue).toHaveBeenCalledWith({
|
||||
name: JobName.AssetExtractMetadata,
|
||||
data: { id: newAsset.id },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete asset', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const thumbnailPath = '/path/to/thumbnail.jpg';
|
||||
const previewPath = '/path/to/preview.jpg';
|
||||
const sidecarPath = '/path/to/sidecar.xmp';
|
||||
await Promise.all([
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Thumbnail, path: thumbnailPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: previewPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: sidecarPath }),
|
||||
]);
|
||||
|
||||
await sut.handleAssetDeletion({ id: asset.id, deleteOnDisk: true });
|
||||
|
||||
expect(ctx.getMock(JobRepository).queue).toHaveBeenCalledWith({
|
||||
name: JobName.FileDelete,
|
||||
data: { files: [thumbnailPath, previewPath, sidecarPath, asset.originalPath] },
|
||||
});
|
||||
});
|
||||
|
||||
it('should not delete offline assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(EventRepository).emit.mockResolvedValue();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id, isOffline: true });
|
||||
const thumbnailPath = '/path/to/thumbnail.jpg';
|
||||
const previewPath = '/path/to/preview.jpg';
|
||||
await Promise.all([
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Thumbnail, path: thumbnailPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: previewPath }),
|
||||
ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Sidecar, path: `/path/to/sidecar.xmp` }),
|
||||
]);
|
||||
|
||||
await sut.handleAssetDeletion({ id: asset.id, deleteOnDisk: true });
|
||||
|
||||
expect(ctx.getMock(JobRepository).queue).toHaveBeenCalledWith({
|
||||
name: JobName.FileDelete,
|
||||
data: { files: [thumbnailPath, previewPath] },
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('update', () => {
|
||||
it('should automatically lock lockable columns', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: null });
|
||||
|
||||
await sut.update(auth, asset.id, {
|
||||
latitude: 42,
|
||||
longitude: 42,
|
||||
rating: 3,
|
||||
description: 'foo',
|
||||
dateTimeOriginal: '2023-11-19T18:11:00+01:00',
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({
|
||||
lockedProperties: ['timeZone', 'rating', 'description', 'latitude', 'longitude', 'dateTimeOriginal'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-19T18:11:00+00:00', timeZone: null }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal with time zone', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queue.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.update(auth, asset.id, { dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00', timeZone: 'UTC-7' }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('updateAll', () => {
|
||||
it('should automatically lock lockable columns', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({ lockedProperties: null });
|
||||
|
||||
await sut.updateAll(auth, {
|
||||
ids: [asset.id],
|
||||
latitude: 42,
|
||||
description: 'foo',
|
||||
longitude: 42,
|
||||
rating: 3,
|
||||
dateTimeOriginal: '2023-11-19T18:11:00+01:00',
|
||||
});
|
||||
|
||||
await expect(
|
||||
ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select('lockedProperties')
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow(),
|
||||
).resolves.toEqual({
|
||||
lockedProperties: ['timeZone', 'rating', 'description', 'latitude', 'longitude', 'dateTimeOriginal'],
|
||||
});
|
||||
});
|
||||
|
||||
it('should relatively update assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await sut.updateAll(auth, { ids: [asset.id], dateTimeRelative: -11 });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({
|
||||
dateTimeOriginal: '2023-11-19T18:00:00+00:00',
|
||||
}),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.updateAll(auth, { ids: [asset.id], dateTimeOriginal: '2023-11-19T18:11:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-19T18:11:00+00:00', timeZone: null }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update dateTimeOriginal with time zone', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
ctx.getMock(JobRepository).queueAll.mockResolvedValue();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, description: 'test' });
|
||||
|
||||
await sut.updateAll(auth, { ids: [asset.id], dateTimeOriginal: '2023-11-19T18:11:00.000-07:00' });
|
||||
|
||||
await expect(ctx.get(AssetRepository).getById(asset.id, { exifInfo: true })).resolves.toEqual(
|
||||
expect.objectContaining({
|
||||
exifInfo: expect.objectContaining({ dateTimeOriginal: '2023-11-20T01:11:00+00:00', timeZone: 'UTC-7' }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('upsertBulkMetadata', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const items = [{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } }];
|
||||
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
|
||||
expect(metadata.length).toEqual(1);
|
||||
expect(metadata[0]).toEqual(
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } }),
|
||||
);
|
||||
});
|
||||
|
||||
it('should work on conflict', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'old-id' } });
|
||||
|
||||
// verify existing metadata
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'old-id' } }),
|
||||
]);
|
||||
|
||||
const items = [{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'new-id' } }];
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
// verify updated metadata
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'new-id' } }),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should work with multiple assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const items = [
|
||||
{ assetId: asset1.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
|
||||
{ assetId: asset2.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } },
|
||||
];
|
||||
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
const metadata1 = await ctx.get(AssetRepository).getMetadata(asset1.id);
|
||||
expect(metadata1).toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } }),
|
||||
]);
|
||||
|
||||
const metadata2 = await ctx.get(AssetRepository).getMetadata(asset2.id);
|
||||
expect(metadata2).toEqual([
|
||||
expect.objectContaining({ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } }),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should work with multiple metadata for the same asset', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const items = [
|
||||
{ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } },
|
||||
{ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } },
|
||||
];
|
||||
|
||||
await sut.upsertBulkMetadata(auth, { items });
|
||||
|
||||
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
|
||||
expect(metadata).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
key: AssetMetadataKey.MobileApp,
|
||||
value: { iCloudId: 'id1' },
|
||||
}),
|
||||
expect.objectContaining({
|
||||
key: 'some-other-key',
|
||||
value: { foo: 'bar' },
|
||||
}),
|
||||
]),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteBulkMetadata', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'foo' } });
|
||||
|
||||
await sut.deleteBulkMetadata(auth, { items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }] });
|
||||
|
||||
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
|
||||
expect(metadata.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should work even if the item does not exist', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await sut.deleteBulkMetadata(auth, { items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }] });
|
||||
|
||||
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
|
||||
expect(metadata.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should work with multiple assets', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset1.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset2.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id2' } });
|
||||
|
||||
await sut.deleteBulkMetadata(auth, {
|
||||
items: [
|
||||
{ assetId: asset1.id, key: AssetMetadataKey.MobileApp },
|
||||
{ assetId: asset2.id, key: AssetMetadataKey.MobileApp },
|
||||
],
|
||||
});
|
||||
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset1.id)).resolves.toEqual([]);
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset2.id)).resolves.toEqual([]);
|
||||
});
|
||||
|
||||
it('should work with multiple metadata for the same asset', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } });
|
||||
|
||||
await sut.deleteBulkMetadata(auth, {
|
||||
items: [
|
||||
{ assetId: asset.id, key: AssetMetadataKey.MobileApp },
|
||||
{ assetId: asset.id, key: 'some-other-key' },
|
||||
],
|
||||
});
|
||||
|
||||
await expect(ctx.get(AssetRepository).getMetadata(asset.id)).resolves.toEqual([]);
|
||||
});
|
||||
|
||||
it('should not delete unspecified keys', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: AssetMetadataKey.MobileApp, value: { iCloudId: 'id1' } });
|
||||
await ctx.newMetadata({ assetId: asset.id, key: 'some-other-key', value: { foo: 'bar' } });
|
||||
|
||||
await sut.deleteBulkMetadata(auth, {
|
||||
items: [{ assetId: asset.id, key: AssetMetadataKey.MobileApp }],
|
||||
});
|
||||
|
||||
const metadata = await ctx.get(AssetRepository).getMetadata(asset.id);
|
||||
expect(metadata).toEqual([expect.objectContaining({ key: 'some-other-key', value: { foo: 'bar' } })]);
|
||||
});
|
||||
});
|
||||
});
|
||||
84
server/test/medium/specs/services/audit.database.spec.ts
Normal file
84
server/test/medium/specs/services/audit.database.spec.ts
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { partner_delete_audit, stack_delete_audit } from 'src/schema/functions';
|
||||
import { BaseService } from 'src/services/base.service';
|
||||
import { MediumTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
describe('audit', () => {
|
||||
let ctx: MediumTestContext;
|
||||
|
||||
beforeAll(async () => {
|
||||
ctx = new MediumTestContext(BaseService, {
|
||||
database: await getKyselyDB(),
|
||||
real: [],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
});
|
||||
|
||||
describe(partner_delete_audit.name, () => {
|
||||
it('should not cascade user deletes to partners_audit', async () => {
|
||||
const partnerRepo = ctx.get(PartnerRepository);
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user: user1 } = await ctx.newUser();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await partnerRepo.create({ sharedById: user1.id, sharedWithId: user2.id });
|
||||
await userRepo.delete(user1, true);
|
||||
await expect(
|
||||
ctx.database.selectFrom('partner_audit').select(['id']).where('sharedById', '=', user1.id).execute(),
|
||||
).resolves.toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe(stack_delete_audit.name, () => {
|
||||
it('should not cascade user deletes to stacks_audit', async () => {
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
|
||||
await userRepo.delete(user, true);
|
||||
await expect(
|
||||
ctx.database.selectFrom('stack_audit').select(['id']).where('userId', '=', user.id).execute(),
|
||||
).resolves.toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('assets_audit', () => {
|
||||
it('should not cascade user deletes to assets_audit', async () => {
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await userRepo.delete(user, true);
|
||||
await expect(
|
||||
ctx.database.selectFrom('asset_audit').select(['id']).where('assetId', '=', asset.id).execute(),
|
||||
).resolves.toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('exif', () => {
|
||||
it('should automatically set updatedAt and updateId when the row is updated', async () => {
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
|
||||
const before = await ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select(['updatedAt', 'updateId'])
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon 2' });
|
||||
|
||||
const after = await ctx.database
|
||||
.selectFrom('asset_exif')
|
||||
.select(['updatedAt', 'updateId'])
|
||||
.where('assetId', '=', asset.id)
|
||||
.executeTakeFirstOrThrow();
|
||||
|
||||
expect(before.updateId).not.toEqual(after.updateId);
|
||||
expect(before.updatedAt).not.toEqual(after.updatedAt);
|
||||
});
|
||||
});
|
||||
});
|
||||
66
server/test/medium/specs/services/auth-admin.service.spec.ts
Normal file
66
server/test/medium/specs/services/auth-admin.service.spec.ts
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AuthAdminService } from 'src/services/auth-admin.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AuthAdminService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [UserRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(AuthAdminService.name, () => {
|
||||
describe('unlinkAll', () => {
|
||||
it('should reset user.oauthId', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user } = await ctx.newUser({ oauthId: 'test-oauth-id' });
|
||||
const auth = factory.auth();
|
||||
|
||||
await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
|
||||
await expect(userRepo.get(user.id, { withDeleted: true })).resolves.toEqual(
|
||||
expect.objectContaining({ oauthId: '' }),
|
||||
);
|
||||
});
|
||||
|
||||
it('should reset a deleted user', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user } = await ctx.newUser({ oauthId: 'test-oauth-id', deletedAt: new Date() });
|
||||
const auth = factory.auth();
|
||||
|
||||
await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
|
||||
await expect(userRepo.get(user.id, { withDeleted: true })).resolves.toEqual(
|
||||
expect.objectContaining({ oauthId: '' }),
|
||||
);
|
||||
});
|
||||
|
||||
it('should reset multiple users', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user: user1 } = await ctx.newUser({ oauthId: '1' });
|
||||
const { user: user2 } = await ctx.newUser({ oauthId: '2', deletedAt: new Date() });
|
||||
const auth = factory.auth();
|
||||
|
||||
await expect(sut.unlinkAll(auth)).resolves.toBeUndefined();
|
||||
await expect(userRepo.get(user1.id, { withDeleted: true })).resolves.toEqual(
|
||||
expect.objectContaining({ oauthId: '' }),
|
||||
);
|
||||
await expect(userRepo.get(user2.id, { withDeleted: true })).resolves.toEqual(
|
||||
expect.objectContaining({ oauthId: '' }),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
166
server/test/medium/specs/services/auth.service.spec.ts
Normal file
166
server/test/medium/specs/services/auth.service.spec.ts
Normal file
|
|
@ -0,0 +1,166 @@
|
|||
import { BadRequestException } from '@nestjs/common';
|
||||
import { hash } from 'bcrypt';
|
||||
import { Kysely } from 'kysely';
|
||||
import { AuthType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { CryptoRepository } from 'src/repositories/crypto.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SessionRepository } from 'src/repositories/session.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { TelemetryRepository } from 'src/repositories/telemetry.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { AuthService } from 'src/services/auth.service';
|
||||
import { mediumFactory, newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(AuthService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AccessRepository,
|
||||
ConfigRepository,
|
||||
CryptoRepository,
|
||||
DatabaseRepository,
|
||||
SessionRepository,
|
||||
SystemMetadataRepository,
|
||||
UserRepository,
|
||||
],
|
||||
mock: [LoggingRepository, StorageRepository, EventRepository, TelemetryRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Acquire the shared Kysely connection once for all specs in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// Medium tests for AuthService sign-up, login, logout, and password flows
// against real user/session repositories.
describe(AuthService.name, () => {
  describe('adminSignUp', () => {
    it(`should sign up the admin`, async () => {
      const { sut, ctx } = setup();
      // The service emits an event on sign-up; stub it out.
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };

      await expect(sut.adminSignUp(dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          email: dto.email,
          name: dto.name,
          isAdmin: true,
        }),
      );
    });

    it('should not allow a second admin to sign up', async () => {
      const { sut, ctx } = setup();
      // Existing admin means the sign-up endpoint must reject.
      await ctx.newUser({ isAdmin: true });
      const dto = { name: 'Admin', email: 'admin@immich.cloud', password: 'password' };

      const response = sut.adminSignUp(dto);
      await expect(response).rejects.toThrow(BadRequestException);
      await expect(response).rejects.toThrow('The server already has an admin');
    });
  });

  describe('login', () => {
    it('should reject an incorrect password', async () => {
      const { sut, ctx } = setup();
      const password = 'password';
      // Users are stored with bcrypt-hashed passwords.
      const passwordHashed = await hash(password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const dto = { email: user.email, password: 'wrong-password' };

      await expect(sut.login(dto, mediumFactory.loginDetails())).rejects.toThrow('Incorrect email or password');
    });

    it('should accept a correct password and return a login response', async () => {
      const { sut, ctx } = setup();
      const password = 'password';
      const passwordHashed = await hash(password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const dto = { email: user.email, password };

      // Full shape of the login response, including a freshly minted token.
      await expect(sut.login(dto, mediumFactory.loginDetails())).resolves.toEqual({
        accessToken: expect.any(String),
        isAdmin: user.isAdmin,
        isOnboarded: false,
        name: user.name,
        profileImagePath: user.profileImagePath,
        userId: user.id,
        userEmail: user.email,
        shouldChangePassword: user.shouldChangePassword,
      });
    });
  });

  describe('logout', () => {
    it('should logout', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
    });

    it('should cleanup the session', async () => {
      const { sut, ctx } = setup();
      const sessionRepo = ctx.get(SessionRepository);
      const eventRepo = ctx.getMock(EventRepository);
      const { user } = await ctx.newUser();
      const { session } = await ctx.newSession({ userId: user.id });
      const auth = factory.auth({ session, user });
      eventRepo.emit.mockResolvedValue();

      // Session exists before logout, is gone after.
      await expect(sessionRepo.get(session.id)).resolves.toEqual(expect.objectContaining({ id: session.id }));
      await expect(sut.logout(auth, AuthType.Password)).resolves.toEqual({
        successful: true,
        redirectUri: '/auth/login?autoLaunch=0',
      });
      await expect(sessionRepo.get(session.id)).resolves.toBeUndefined();
    });
  });

  describe('changePassword', () => {
    it('should change the password and login with it', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = { password: 'password', newPassword: 'new-password' };
      const passwordHashed = await hash(dto.password, 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const auth = factory.auth({ user });

      const response = await sut.changePassword(auth, dto);
      expect(response).toEqual(
        expect.objectContaining({
          id: user.id,
          email: user.email,
        }),
      );
      // The password (even hashed) must never leak into the response.
      expect((response as any).password).not.toBeDefined();

      // The new password is immediately usable for login.
      await expect(
        sut.login({ email: user.email, password: dto.newPassword }, mediumFactory.loginDetails()),
      ).resolves.toBeDefined();
    });

    it('should validate the current password', async () => {
      const { sut, ctx } = setup();
      const dto = { password: 'wrong-password', newPassword: 'new-password' };
      const passwordHashed = await hash('password', 10);
      const { user } = await ctx.newUser({ password: passwordHashed });
      const auth = factory.auth({ user });

      const response = sut.changePassword(auth, dto);
      await expect(response).rejects.toThrow(BadRequestException);
      await expect(response).rejects.toThrow('Wrong password');
    });
  });
});
|
||||
246
server/test/medium/specs/services/memory.service.spec.ts
Normal file
246
server/test/medium/specs/services/memory.service.spec.ts
Normal file
|
|
@ -0,0 +1,246 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { AssetFileType, MemoryType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { MemoryRepository } from 'src/repositories/memory.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { MemoryService } from 'src/services/memory.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(MemoryService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [
|
||||
AccessRepository,
|
||||
AssetRepository,
|
||||
DatabaseRepository,
|
||||
MemoryRepository,
|
||||
UserRepository,
|
||||
SystemMetadataRepository,
|
||||
UserRepository,
|
||||
PartnerRepository,
|
||||
],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Medium tests for MemoryService creation and scheduled memory generation.
describe(MemoryService.name, () => {
  // NOTE(review): this file re-acquires the database before EACH test, while
  // sibling spec files use beforeAll — confirm whether per-test isolation is
  // intentional here.
  beforeEach(async () => {
    defaultDatabase = await getKyselyDB();
  });

  describe('create', () => {
    it('should create a new memory', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
      };

      await expect(sut.create(auth, dto)).resolves.toEqual({
        id: expect.any(String),
        type: dto.type,
        data: dto.data,
        createdAt: expect.any(Date),
        updatedAt: expect.any(Date),
        isSaved: false,
        memoryAt: dto.memoryAt,
        ownerId: user.id,
        assets: [],
      });
    });

    it('should create a new memory (with assets)', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
      const auth = factory.auth({ user });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
        assetIds: [asset1.id, asset2.id],
      };

      await expect(sut.create(auth, dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          assets: [expect.objectContaining({ id: asset1.id }), expect.objectContaining({ id: asset2.id })],
        }),
      );
    });

    it('should create a new memory and ignore assets the user does not have access to', async () => {
      const { sut, ctx } = setup();
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      // asset2 belongs to a different user and must be filtered out.
      const { asset: asset1 } = await ctx.newAsset({ ownerId: user1.id });
      const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
      const auth = factory.auth({ user: user1 });
      const dto = {
        type: MemoryType.OnThisDay,
        data: { year: 2021 },
        memoryAt: new Date(2021),
        assetIds: [asset1.id, asset2.id],
      };

      await expect(sut.create(auth, dto)).resolves.toEqual(
        expect.objectContaining({
          id: expect.any(String),
          assets: [expect.objectContaining({ id: asset1.id })],
        }),
      );
    });
  });

  describe('onMemoryCreate', () => {
    it('should work on an empty database', async () => {
      const { sut } = setup();
      await expect(sut.onMemoriesCreate()).resolves.not.toThrow();
    });

    it('should create a memory from an asset', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2025, month: 2, day: 25 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      // Asset dated exactly one year before "now" qualifies for on-this-day.
      const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
      await Promise.all([
        ctx.newExif({ assetId: asset.id, make: 'Canon' }),
        ctx.newJobStatus({ assetId: asset.id }),
        assetRepo.upsertFiles([
          { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
          { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
        ]),
      ]);

      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();

      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      expect(memories[0]).toEqual(
        expect.objectContaining({
          id: expect.any(String),
          createdAt: expect.any(Date),
          memoryAt: expect.any(Date),
          updatedAt: expect.any(Date),
          deletedAt: null,
          ownerId: user.id,
          assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
          isSaved: false,
          showAt: now.startOf('day').toJSDate(),
          hideAt: now.endOf('day').toJSDate(),
          seenAt: null,
          type: 'on_this_day',
          data: { year: 2024 },
        }),
      );
    });

    it('should create a memory from an asset - in advance', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2035, month: 2, day: 26 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime: now.minus({ years: 1 }).toISO() });
      await Promise.all([
        ctx.newExif({ assetId: asset.id, make: 'Canon' }),
        ctx.newJobStatus({ assetId: asset.id }),
        assetRepo.upsertFiles([
          { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
          { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
        ]),
      ]);

      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();

      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);
      expect(memories[0]).toEqual(
        expect.objectContaining({
          id: expect.any(String),
          createdAt: expect.any(Date),
          memoryAt: expect.any(Date),
          updatedAt: expect.any(Date),
          deletedAt: null,
          ownerId: user.id,
          assets: expect.arrayContaining([expect.objectContaining({ id: asset.id })]),
          isSaved: false,
          showAt: now.startOf('day').toJSDate(),
          hideAt: now.endOf('day').toJSDate(),
          seenAt: null,
          type: 'on_this_day',
          data: { year: 2034 },
        }),
      );
    });

    it('should not generate a memory twice for the same day', async () => {
      const { sut, ctx } = setup();
      const assetRepo = ctx.get(AssetRepository);
      const memoryRepo = ctx.get(MemoryRepository);
      const now = DateTime.fromObject({ year: 2025, month: 2, day: 20 }, { zone: 'utc' }) as DateTime<true>;
      const { user } = await ctx.newUser();
      // Three assets from roughly a year ago, a few days apart.
      for (const dto of [
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 3 }).toISO(),
        },
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 4 }).toISO(),
        },
        {
          ownerId: user.id,
          localDateTime: now.minus({ year: 1 }).plus({ days: 5 }).toISO(),
        },
      ]) {
        const { asset } = await ctx.newAsset(dto);
        await Promise.all([
          ctx.newExif({ assetId: asset.id, make: 'Canon' }),
          ctx.newJobStatus({ assetId: asset.id }),
          assetRepo.upsertFiles([
            { assetId: asset.id, type: AssetFileType.Preview, path: '/path/to/preview.jpg' },
            { assetId: asset.id, type: AssetFileType.Thumbnail, path: '/path/to/thumbnail.jpg' },
          ]),
        ]);
      }

      vi.setSystemTime(now.toJSDate());
      await sut.onMemoriesCreate();

      const memories = await memoryRepo.search(user.id, {});
      expect(memories.length).toBe(1);

      // Running the job again must be idempotent.
      await sut.onMemoriesCreate();

      const memoriesAfter = await memoryRepo.search(user.id, {});
      expect(memoriesAfter.length).toBe(1);
    });
  });

  describe('onMemoriesCleanup', () => {
    it('should run without error', async () => {
      const { sut } = setup();
      await expect(sut.onMemoriesCleanup()).resolves.not.toThrow();
    });
  });
});
|
||||
108
server/test/medium/specs/services/metadata.service.spec.ts
Normal file
108
server/test/medium/specs/services/metadata.service.spec.ts
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
import { Stats } from 'node:fs';
|
||||
import { writeFile } from 'node:fs/promises';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { MetadataRepository } from 'src/repositories/metadata.repository';
|
||||
import { MetadataService } from 'src/services/metadata.service';
|
||||
import { automock, newRandomImage, newTestService, ServiceMocks } from 'test/utils';
|
||||
|
||||
// Real MetadataRepository (exiftool-backed) with an automocked logger.
const metadataRepository = new MetadataRepository(
  // eslint-disable-next-line no-sparse-arrays
  automock(LoggingRepository, { args: [, { getEnv: () => ({}) }], strict: false }),
);

// Writes a fresh PNG to the OS temp dir and stamps the given EXIF tags on it.
// NOTE: the path is fixed ('test.png'), so successive calls overwrite the file.
const createTestFile = async (exifData: Record<string, any>) => {
  const data = newRandomImage();
  const filePath = join(tmpdir(), 'test.png');
  await writeFile(filePath, data);
  await metadataRepository.writeTags(filePath, exifData);
  return { filePath };
};
|
||||
|
||||
// One table-driven time zone scenario for handleMetadataExtraction.
type TimeZoneTest = {
  // Shown as the test name via it.each's '$description'.
  description: string;
  // Optional TZ env value to run the server under; unset means no TZ override.
  serverTimeZone?: string;
  // EXIF tags written to the test image before extraction.
  exifData: Record<string, any>;
  expected: {
    localDateTime: string;
    dateTimeOriginal: string;
    // null when the EXIF data carries no zone information.
    timeZone: string | null;
  };
};
|
||||
|
||||
describe(MetadataService.name, () => {
|
||||
let sut: MetadataService;
|
||||
let mocks: ServiceMocks;
|
||||
|
||||
beforeEach(() => {
|
||||
({ sut, mocks } = newTestService(MetadataService, { metadata: metadataRepository }));
|
||||
|
||||
mocks.storage.stat.mockResolvedValue({
|
||||
size: 123_456,
|
||||
mtime: new Date(654_321),
|
||||
mtimeMs: 654_321,
|
||||
birthtimeMs: 654_322,
|
||||
} as Stats);
|
||||
|
||||
delete process.env.TZ;
|
||||
});
|
||||
|
||||
it('should be defined', () => {
|
||||
expect(sut).toBeDefined();
|
||||
});
|
||||
|
||||
describe('handleMetadataExtraction', () => {
|
||||
const timeZoneTests: TimeZoneTest[] = [
|
||||
{
|
||||
description: 'should handle no time zone information',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2022:01:01 00:00:00',
|
||||
},
|
||||
expected: {
|
||||
localDateTime: '2022-01-01T00:00:00.000Z',
|
||||
dateTimeOriginal: '2022-01-01T00:00:00.000Z',
|
||||
timeZone: null,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: 'should handle a +13:00 time zone',
|
||||
exifData: {
|
||||
DateTimeOriginal: '2022:01:01 00:00:00+13:00',
|
||||
},
|
||||
expected: {
|
||||
localDateTime: '2022-01-01T00:00:00.000Z',
|
||||
dateTimeOriginal: '2021-12-31T11:00:00.000Z',
|
||||
timeZone: 'UTC+13',
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
it.each(timeZoneTests)('$description', async ({ exifData, serverTimeZone, expected }) => {
|
||||
process.env.TZ = serverTimeZone ?? undefined;
|
||||
|
||||
const { filePath } = await createTestFile(exifData);
|
||||
mocks.assetJob.getForMetadataExtraction.mockResolvedValue({
|
||||
id: 'asset-1',
|
||||
originalPath: filePath,
|
||||
files: [],
|
||||
} as any);
|
||||
|
||||
await sut.handleMetadataExtraction({ id: 'asset-1' });
|
||||
|
||||
expect(mocks.asset.upsertExif).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
dateTimeOriginal: new Date(expected.dateTimeOriginal),
|
||||
timeZone: expected.timeZone,
|
||||
}),
|
||||
{ lockedPropertiesBehavior: 'skip' },
|
||||
);
|
||||
|
||||
expect(mocks.asset.update).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
localDateTime: new Date(expected.localDateTime),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
249
server/test/medium/specs/services/ocr.service.spec.ts
Normal file
249
server/test/medium/specs/services/ocr.service.spec.ts
Normal file
|
|
@ -0,0 +1,249 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetFileType, JobStatus } from 'src/enum';
|
||||
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
|
||||
import { OcrRepository } from 'src/repositories/ocr.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { OcrService } from 'src/services/ocr.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(OcrService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AssetRepository, AssetJobRepository, ConfigRepository, OcrRepository, SystemMetadataRepository],
|
||||
mock: [JobRepository, LoggingRepository, MachineLearningRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Acquire the shared Kysely connection once for all specs in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// Medium tests for OcrService.handleOcr: ML results are mocked, persistence
// (ocr rows, ocr_search text, asset_job_status.ocrAt) is verified for real.
describe(OcrService.name, () => {
  it('should work', () => {
    const { sut } = setup();
    expect(sut).toBeDefined();
  });

  it('should parse asset', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    // OCR runs against the preview file, which must exist.
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });

    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    // One detection box: 4 (x, y) corner pairs flattened into 8 numbers.
    machineLearningMock.ocr.mockResolvedValue({
      box: [10, 10, 50, 10, 50, 50, 10, 50],
      boxScore: [0.99],
      text: ['Test OCR'],
      textScore: [0.95],
    });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
      {
        assetId: asset.id,
        boxScore: 0.99,
        id: expect.any(String),
        text: 'Test OCR',
        textScore: 0.95,
        isVisible: true,
        x1: 10,
        y1: 10,
        x2: 50,
        y2: 10,
        x3: 50,
        y3: 50,
        x4: 10,
        y4: 50,
      },
    ]);
    // The searchable text row is written alongside the box rows.
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toEqual({
      assetId: asset.id,
      text: 'Test OCR',
    });
    // The job status timestamp is stamped.
    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });

  it('should handle multiple boxes', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });

    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    // Five boxes with sequential coordinates 0..39 (8 numbers per box).
    machineLearningMock.ocr.mockResolvedValue({
      box: Array.from({ length: 8 * 5 }, (_, i) => i),
      boxScore: [0.7, 0.67, 0.65, 0.62, 0.6],
      text: ['One', 'Two', 'Three', 'Four', 'Five'],
      textScore: [0.9, 0.89, 0.88, 0.87, 0.86],
    });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
      {
        assetId: asset.id,
        boxScore: 0.7,
        id: expect.any(String),
        text: 'One',
        textScore: 0.9,
        isVisible: true,
        x1: 0,
        y1: 1,
        x2: 2,
        y2: 3,
        x3: 4,
        y3: 5,
        x4: 6,
        y4: 7,
      },
      {
        assetId: asset.id,
        boxScore: 0.67,
        id: expect.any(String),
        text: 'Two',
        textScore: 0.89,
        isVisible: true,
        x1: 8,
        y1: 9,
        x2: 10,
        y2: 11,
        x3: 12,
        y3: 13,
        x4: 14,
        y4: 15,
      },
      {
        assetId: asset.id,
        boxScore: 0.65,
        id: expect.any(String),
        text: 'Three',
        textScore: 0.88,
        isVisible: true,
        x1: 16,
        y1: 17,
        x2: 18,
        y2: 19,
        x3: 20,
        y3: 21,
        x4: 22,
        y4: 23,
      },
      {
        assetId: asset.id,
        boxScore: 0.62,
        id: expect.any(String),
        text: 'Four',
        textScore: 0.87,
        isVisible: true,
        x1: 24,
        y1: 25,
        x2: 26,
        y2: 27,
        x3: 28,
        y3: 29,
        x4: 30,
        y4: 31,
      },
      {
        assetId: asset.id,
        boxScore: 0.6,
        id: expect.any(String),
        text: 'Five',
        textScore: 0.86,
        isVisible: true,
        x1: 32,
        y1: 33,
        x2: 34,
        y2: 35,
        x3: 36,
        y3: 37,
        x4: 38,
        y4: 39,
      },
    ]);
    // Search text is the detected strings joined with spaces.
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toEqual({
      assetId: asset.id,
      text: 'One Two Three Four Five',
    });
    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });

  it('should handle no boxes', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });

    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    // No rows are written, but the job is still marked done via ocrAt.
    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toBeUndefined();
    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });

  it('should update existing results', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });

    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    // First run produces one box...
    machineLearningMock.ocr.mockResolvedValue({
      box: [10, 10, 50, 10, 50, 50, 10, 50],
      boxScore: [0.99],
      text: ['Test OCR'],
      textScore: [0.95],
    });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    // ...second run returns nothing and must clear the previous results.
    machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);
    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toBeUndefined();
  });
});
|
||||
80
server/test/medium/specs/services/person.service.spec.ts
Normal file
80
server/test/medium/specs/services/person.service.spec.ts
Normal file
|
|
@ -0,0 +1,80 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { PersonService } from 'src/services/person.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(PersonService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AccessRepository, DatabaseRepository, PersonRepository],
|
||||
mock: [LoggingRepository, StorageRepository],
|
||||
});
|
||||
};
|
||||
|
||||
// Acquire the shared Kysely connection once for all specs in this file.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// Medium tests for PersonService delete/deleteAll, including access checks
// and thumbnail cleanup via the mocked storage repository.
describe(PersonService.name, () => {
  describe('delete', () => {
    it('should throw an error when there is no access', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      // Random id the auth user has no access to.
      const personId = factory.uuid();
      await expect(sut.delete(auth, personId)).rejects.toThrow('Not found or no person.delete access');
    });

    it('should delete the person', async () => {
      const { sut, ctx } = setup();
      const personRepo = ctx.get(PersonRepository);
      const storageMock = ctx.getMock(StorageRepository);
      const { user } = await ctx.newUser();
      const { person } = await ctx.newPerson({ ownerId: user.id });
      const auth = factory.auth({ user });
      storageMock.unlink.mockResolvedValue();

      // Person exists before delete, is gone after.
      await expect(personRepo.getById(person.id)).resolves.toEqual(expect.objectContaining({ id: person.id }));
      await expect(sut.delete(auth, person.id)).resolves.toBeUndefined();
      await expect(personRepo.getById(person.id)).resolves.toBeUndefined();

      // The thumbnail file is removed from storage.
      expect(storageMock.unlink).toHaveBeenCalledWith(person.thumbnailPath);
    });
  });

  describe('deleteAll', () => {
    it('should throw an error when there is no access', async () => {
      const { sut } = setup();
      const auth = factory.auth();
      const personId = factory.uuid();
      await expect(sut.deleteAll(auth, { ids: [personId] })).rejects.toThrow('Not found or no person.delete access');
    });

    it('should delete the person', async () => {
      const { sut, ctx } = setup();
      const storageMock = ctx.getMock(StorageRepository);
      const personRepo = ctx.get(PersonRepository);
      const { user } = await ctx.newUser();
      const { person: person1 } = await ctx.newPerson({ ownerId: user.id });
      const { person: person2 } = await ctx.newPerson({ ownerId: user.id });
      const auth = factory.auth({ user });
      storageMock.unlink.mockResolvedValue();

      await expect(sut.deleteAll(auth, { ids: [person1.id, person2.id] })).resolves.toBeUndefined();
      await expect(personRepo.getById(person1.id)).resolves.toBeUndefined();
      await expect(personRepo.getById(person2.id)).resolves.toBeUndefined();

      // One unlink per deleted person.
      expect(storageMock.unlink).toHaveBeenCalledTimes(2);
      expect(storageMock.unlink).toHaveBeenCalledWith(person1.thumbnailPath);
      expect(storageMock.unlink).toHaveBeenCalledWith(person2.thumbnailPath);
    });
  });
});
|
||||
308
server/test/medium/specs/services/plugin.service.spec.ts
Normal file
308
server/test/medium/specs/services/plugin.service.spec.ts
Normal file
|
|
@ -0,0 +1,308 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { PluginContext } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PluginRepository } from 'src/repositories/plugin.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { PluginService } from 'src/services/plugin.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared fixtures for the PluginService medium tests.
let defaultDatabase: Kysely<DB>;
let pluginRepo: PluginRepository;

// Builds a PluginService with real plugin/access repositories against the
// given database (or the shared default one), mocking only logging.
const setup = (db?: Kysely<DB>) => {
  return newMediumService(PluginService, {
    database: db || defaultDatabase,
    real: [PluginRepository, AccessRepository],
    mock: [LoggingRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
  pluginRepo = new PluginRepository(defaultDatabase);
});

// Each test loads its own plugins; wipe the table so tests stay independent.
afterEach(async () => {
  await defaultDatabase.deleteFrom('plugin').execute();
});
|
||||
|
||||
// Medium tests for PluginService: plugins are inserted through the real
// PluginRepository.loadPlugin and read back through the service.
describe(PluginService.name, () => {
  describe('getAll', () => {
    it('should return empty array when no plugins exist', async () => {
      const { sut } = setup();

      const plugins = await sut.getAll();

      expect(plugins).toEqual([]);
    });

    it('should return plugin without filters and actions', async () => {
      const { sut } = setup();

      const result = await pluginRepo.loadPlugin(
        {
          name: 'test-plugin',
          title: 'Test Plugin',
          description: 'A test plugin',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/test.wasm' },
        },
        '/test/base/path',
      );

      const plugins = await sut.getAll();

      expect(plugins).toHaveLength(1);
      // A plugin loaded without filters/actions should expose empty arrays.
      expect(plugins[0]).toMatchObject({
        id: result.plugin.id,
        name: 'test-plugin',
        description: 'A test plugin',
        author: 'Test Author',
        version: '1.0.0',
        filters: [],
        actions: [],
      });
    });

    it('should return plugin with filters and actions', async () => {
      const { sut } = setup();

      const result = await pluginRepo.loadPlugin(
        {
          name: 'full-plugin',
          title: 'Full Plugin',
          description: 'A plugin with filters and actions',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/full.wasm' },
          filters: [
            {
              methodName: 'test-filter',
              title: 'Test Filter',
              description: 'A test filter',
              supportedContexts: [PluginContext.Asset],
              schema: { type: 'object', properties: {} },
            },
          ],
          actions: [
            {
              methodName: 'test-action',
              title: 'Test Action',
              description: 'A test action',
              supportedContexts: [PluginContext.Asset],
              schema: { type: 'object', properties: {} },
            },
          ],
        },
        '/test/base/path',
      );

      const plugins = await sut.getAll();

      expect(plugins).toHaveLength(1);
      // Filters/actions come back with their generated ids and the pluginId backlink.
      expect(plugins[0]).toMatchObject({
        id: result.plugin.id,
        name: 'full-plugin',
        filters: [
          {
            id: result.filters[0].id,
            pluginId: result.plugin.id,
            methodName: 'test-filter',
            title: 'Test Filter',
            description: 'A test filter',
            supportedContexts: [PluginContext.Asset],
            schema: { type: 'object', properties: {} },
          },
        ],
        actions: [
          {
            id: result.actions[0].id,
            pluginId: result.plugin.id,
            methodName: 'test-action',
            title: 'Test Action',
            description: 'A test action',
            supportedContexts: [PluginContext.Asset],
            schema: { type: 'object', properties: {} },
          },
        ],
      });
    });

    it('should return multiple plugins with their respective filters and actions', async () => {
      const { sut } = setup();

      await pluginRepo.loadPlugin(
        {
          name: 'plugin-1',
          title: 'Plugin 1',
          description: 'First plugin',
          author: 'Author 1',
          version: '1.0.0',
          wasm: { path: '/path/to/plugin1.wasm' },
          filters: [
            {
              methodName: 'filter-1',
              title: 'Filter 1',
              description: 'Filter for plugin 1',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );

      await pluginRepo.loadPlugin(
        {
          name: 'plugin-2',
          title: 'Plugin 2',
          description: 'Second plugin',
          author: 'Author 2',
          version: '2.0.0',
          wasm: { path: '/path/to/plugin2.wasm' },
          actions: [
            {
              methodName: 'action-2',
              title: 'Action 2',
              description: 'Action for plugin 2',
              supportedContexts: [PluginContext.Album],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );

      const plugins = await sut.getAll();

      // Filters/actions must not leak across plugins.
      expect(plugins).toHaveLength(2);
      expect(plugins[0].name).toBe('plugin-1');
      expect(plugins[0].filters).toHaveLength(1);
      expect(plugins[0].actions).toHaveLength(0);

      expect(plugins[1].name).toBe('plugin-2');
      expect(plugins[1].filters).toHaveLength(0);
      expect(plugins[1].actions).toHaveLength(1);
    });

    it('should handle plugin with multiple filters and actions', async () => {
      const { sut } = setup();

      await pluginRepo.loadPlugin(
        {
          name: 'multi-plugin',
          title: 'Multi Plugin',
          description: 'Plugin with multiple items',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/multi.wasm' },
          filters: [
            {
              methodName: 'filter-a',
              title: 'Filter A',
              description: 'First filter',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
            {
              methodName: 'filter-b',
              title: 'Filter B',
              description: 'Second filter',
              supportedContexts: [PluginContext.Album],
              schema: undefined,
            },
          ],
          actions: [
            {
              methodName: 'action-x',
              title: 'Action X',
              description: 'First action',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
            {
              methodName: 'action-y',
              title: 'Action Y',
              description: 'Second action',
              supportedContexts: [PluginContext.Person],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );

      const plugins = await sut.getAll();

      expect(plugins).toHaveLength(1);
      expect(plugins[0].filters).toHaveLength(2);
      expect(plugins[0].actions).toHaveLength(2);
    });
  });

  describe('get', () => {
    it('should throw error when plugin does not exist', async () => {
      const { sut } = setup();

      await expect(sut.get('00000000-0000-0000-0000-000000000000')).rejects.toThrow('Plugin not found');
    });

    it('should return single plugin with filters and actions', async () => {
      const { sut } = setup();

      const result = await pluginRepo.loadPlugin(
        {
          name: 'single-plugin',
          title: 'Single Plugin',
          description: 'A single plugin',
          author: 'Test Author',
          version: '1.0.0',
          wasm: { path: '/path/to/single.wasm' },
          filters: [
            {
              methodName: 'single-filter',
              title: 'Single Filter',
              description: 'A single filter',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
          ],
          actions: [
            {
              methodName: 'single-action',
              title: 'Single Action',
              description: 'A single action',
              supportedContexts: [PluginContext.Asset],
              schema: undefined,
            },
          ],
        },
        '/test/base/path',
      );

      const pluginResult = await sut.get(result.plugin.id);

      expect(pluginResult).toMatchObject({
        id: result.plugin.id,
        name: 'single-plugin',
        filters: [
          {
            id: result.filters[0].id,
            methodName: 'single-filter',
            title: 'Single Filter',
          },
        ],
        actions: [
          {
            id: result.actions[0].id,
            methodName: 'single-action',
            title: 'Single Action',
          },
        ],
      });
    });
  });
});
|
||||
91
server/test/medium/specs/services/search.service.spec.ts
Normal file
91
server/test/medium/specs/services/search.service.spec.ts
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { SearchRepository } from 'src/repositories/search.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SearchService } from 'src/services/search.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared fixtures for the SearchService medium tests.
let defaultDatabase: Kysely<DB>;

// Builds a SearchService with real repositories against the given database
// (or the shared default one), mocking only logging.
const setup = (db?: Kysely<DB>) => {
  return newMediumService(SearchService, {
    database: db || defaultDatabase,
    real: [
      AccessRepository,
      AssetRepository,
      DatabaseRepository,
      SearchRepository,
      PartnerRepository,
      PersonRepository,
    ],
    mock: [LoggingRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// Medium tests for SearchService against a real database.
describe(SearchService.name, () => {
  it('should work', () => {
    const { sut } = setup();
    expect(sut).toBeDefined();
  });

  it('should return assets', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();

    const assets = [];
    // File sizes are deliberately out of order; searchLargeAssets is expected
    // to return largest-first (123_456, 12_334, 599 -> indices 2, 0, 1).
    const sizes = [12_334, 599, 123_456];

    for (let i = 0; i < sizes.length; i++) {
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      await ctx.newExif({ assetId: asset.id, fileSizeInByte: sizes[i] });
      assets.push(asset);
    }

    const auth = factory.auth({ user: { id: user.id } });

    await expect(sut.searchLargeAssets(auth, {})).resolves.toEqual([
      expect.objectContaining({ id: assets[2].id }),
      expect.objectContaining({ id: assets[0].id }),
      expect.objectContaining({ id: assets[1].id }),
    ]);
  });

  describe('searchStatistics', () => {
    it('should return statistics when filtering by personIds', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const { person } = await ctx.newPerson({ ownerId: user.id });
      // Linking a face ties the asset to the person, so the filter matches it.
      await ctx.newAssetFace({ assetId: asset.id, personId: person.id });

      const auth = factory.auth({ user: { id: user.id } });

      const result = await sut.searchStatistics(auth, { personIds: [person.id] });

      expect(result).toEqual({ total: 1 });
    });

    it('should return zero when no assets match the personIds filter', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { person } = await ctx.newPerson({ ownerId: user.id });

      const auth = factory.auth({ user: { id: user.id } });

      const result = await sut.searchStatistics(auth, { personIds: [person.id] });

      expect(result).toEqual({ total: 0 });
    });
  });
});
|
||||
127
server/test/medium/specs/services/shared-link.service.spec.ts
Normal file
127
server/test/medium/specs/services/shared-link.service.spec.ts
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { SharedLinkType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SharedLinkAssetRepository } from 'src/repositories/shared-link-asset.repository';
|
||||
import { SharedLinkRepository } from 'src/repositories/shared-link.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SharedLinkService } from 'src/services/shared-link.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared fixtures for the SharedLinkService medium tests.
let defaultDatabase: Kysely<DB>;

// Builds a SharedLinkService with real access/shared-link repositories,
// mocking logging and storage.
const setup = (db?: Kysely<DB>) => {
  return newMediumService(SharedLinkService, {
    database: db || defaultDatabase,
    real: [AccessRepository, DatabaseRepository, SharedLinkRepository, SharedLinkAssetRepository],
    mock: [LoggingRepository, StorageRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// Medium tests for SharedLinkService: album links and individual-asset links.
describe(SharedLinkService.name, () => {
  describe('get', () => {
    it('should return the correct dates on the shared link album', async () => {
      const { sut, ctx } = setup();

      const { user } = await ctx.newUser();
      const auth = factory.auth({ user });
      const { album } = await ctx.newAlbum({ ownerId: user.id });

      // Out-of-order dates; the album summary should pick min/max for start/end.
      const dates = ['2021-01-01T00:00:00.000Z', '2022-01-01T00:00:00.000Z', '2020-01-01T00:00:00.000Z'];

      for (const date of dates) {
        const { asset } = await ctx.newAsset({ fileCreatedAt: date, localDateTime: date, ownerId: user.id });
        await ctx.newExif({ assetId: asset.id, make: 'Canon' });
        await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
      }

      const sharedLinkRepo = ctx.get(SharedLinkRepository);

      const sharedLink = await sharedLinkRepo.create({
        key: randomBytes(16),
        id: factory.uuid(),
        userId: user.id,
        albumId: album.id,
        allowUpload: true,
        type: SharedLinkType.Album,
      });

      await expect(sut.get(auth, sharedLink.id)).resolves.toMatchObject({
        album: expect.objectContaining({
          startDate: '2020-01-01T00:00:00+00:00',
          endDate: '2022-01-01T00:00:00+00:00',
        }),
      });
    });
  });

  it('should share individually assets', async () => {
    const { sut, ctx } = setup();

    const { user } = await ctx.newUser();

    const assets = await Promise.all([
      ctx.newAsset({ ownerId: user.id }),
      ctx.newAsset({ ownerId: user.id }),
      ctx.newAsset({ ownerId: user.id }),
    ]);

    for (const { asset } of assets) {
      await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    }

    const sharedLinkRepo = ctx.get(SharedLinkRepository);

    const sharedLink = await sharedLinkRepo.create({
      key: randomBytes(16),
      id: factory.uuid(),
      userId: user.id,
      allowUpload: false,
      type: SharedLinkType.Individual,
      assetIds: assets.map(({ asset }) => asset.id),
    });

    await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
      assets: assets.map(({ asset }) => expect.objectContaining({ id: asset.id })),
    });
  });

  it('should remove individually shared asset', async () => {
    const { sut, ctx } = setup();

    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });

    const sharedLinkRepo = ctx.get(SharedLinkRepository);

    const sharedLink = await sharedLinkRepo.create({
      key: randomBytes(16),
      id: factory.uuid(),
      userId: user.id,
      allowUpload: false,
      type: SharedLinkType.Individual,
      assetIds: [asset.id],
    });

    // The asset is visible through the link before removal...
    await expect(sut.getMine({ user, sharedLink }, {})).resolves.toMatchObject({
      assets: [expect.objectContaining({ id: asset.id })],
    });

    await sut.removeAssets(auth, sharedLink.id, {
      assetIds: [asset.id],
    });

    // ...and gone afterwards.
    await expect(sut.getMine({ user, sharedLink }, {})).resolves.toHaveProperty('assets', []);
  });
});
|
||||
46
server/test/medium/specs/services/storage.service.spec.ts
Normal file
46
server/test/medium/specs/services/storage.service.spec.ts
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { StorageRepository } from 'src/repositories/storage.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { StorageService } from 'src/services/storage.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { mockEnvData } from 'test/repositories/config.repository.mock';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared fixtures for the StorageService medium tests.
let defaultDatabase: Kysely<DB>;

// Builds a StorageService with real asset/database/metadata repositories;
// storage, config, and logging are mocked so no filesystem access happens.
const setup = (db?: Kysely<DB>) => {
  return newMediumService(StorageService, {
    database: db || defaultDatabase,
    real: [AssetRepository, DatabaseRepository, SystemMetadataRepository],
    mock: [StorageRepository, ConfigRepository, LoggingRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(StorageService.name, () => {
|
||||
describe('onBoostrap', () => {
|
||||
it('should work', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
|
||||
const configMock = ctx.getMock(ConfigRepository);
|
||||
configMock.getEnv.mockReturnValue(mockEnvData({}));
|
||||
|
||||
const storageMock = ctx.getMock(StorageRepository);
|
||||
storageMock.mkdirSync.mockReturnValue(void 0);
|
||||
storageMock.existsSync.mockReturnValue(true);
|
||||
storageMock.createFile.mockResolvedValue(void 0);
|
||||
storageMock.overwriteFile.mockResolvedValue(void 0);
|
||||
storageMock.readFile.mockResolvedValue(Buffer.from('test content'));
|
||||
|
||||
await expect(sut.onBootstrap()).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
226
server/test/medium/specs/services/sync.service.spec.ts
Normal file
226
server/test/medium/specs/services/sync.service.spec.ts
Normal file
|
|
@ -0,0 +1,226 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { AssetMetadataKey, UserMetadataKey } from 'src/enum';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SyncRepository } from 'src/repositories/sync.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncService } from 'src/services/sync.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
import { v4 } from 'uuid';
|
||||
|
||||
// Shared fixtures for the SyncService audit-cleanup medium tests.
let defaultDatabase: Kysely<DB>;

// Builds a SyncService with real database/sync repositories, mocking logging.
const setup = (db?: Kysely<DB>) => {
  return newMediumService(SyncService, {
    database: db || defaultDatabase,
    real: [DatabaseRepository, SyncRepository],
    mock: [LoggingRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});

// 35 days back — presumably beyond the audit retention window, so cleanup
// removes these rows (the "skip recent records" test keeps a 25-day-old row).
const deletedLongAgo = DateTime.now().minus({ days: 35 }).toISO();

// Asserts that table `t` currently holds exactly `count` rows.
const assertTableCount = async <T extends keyof DB>(db: Kysely<DB>, t: T, count: number) => {
  // db.dynamic.table lets us query a table chosen at runtime with type safety.
  const { table } = db.dynamic;
  const results = await db.selectFrom(table(t).as(t)).selectAll().execute();
  expect(results).toHaveLength(count);
};
|
||||
|
||||
// Medium tests for SyncService.onAuditTableCleanup: each test seeds one audit
// table with an old row and verifies the cleanup job removes it.
describe(SyncService.name, () => {
  describe('onAuditTableCleanup', () => {
    it('should work', async () => {
      const { sut } = setup();
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
    });

    it('should cleanup the album_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_audit';

      await ctx.database
        .insertInto(tableName)
        .values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the album_asset_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_asset_audit';
      // A real album is needed here — presumably for a foreign-key constraint.
      const { user } = await ctx.newUser();
      const { album } = await ctx.newAlbum({ ownerId: user.id });
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: album.id, assetId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the album_user_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'album_user_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ albumId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the asset_audit table', async () => {
      const { sut, ctx } = setup();

      await ctx.database
        .insertInto('asset_audit')
        .values({ assetId: v4(), ownerId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, 'asset_audit', 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, 'asset_audit', 0);
    });

    it('should cleanup the asset_face_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'asset_face_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ assetFaceId: v4(), assetId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the asset_metadata_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'asset_metadata_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ assetId: v4(), key: AssetMetadataKey.MobileApp, deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the memory_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'memory_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ memoryId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the memory_asset_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'memory_asset_audit';
      // A real memory row is needed here — presumably for a foreign-key constraint.
      const { user } = await ctx.newUser();
      const { memory } = await ctx.newMemory({ ownerId: user.id });
      await ctx.database
        .insertInto(tableName)
        .values({ memoryId: memory.id, assetId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the partner_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'partner_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ sharedById: v4(), sharedWithId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the stack_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'stack_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ stackId: v4(), userId: v4(), deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the user_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'user_audit';
      await ctx.database.insertInto(tableName).values({ userId: v4(), deletedAt: deletedLongAgo }).execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should cleanup the user_metadata_audit table', async () => {
      const { sut, ctx } = setup();
      const tableName = 'user_metadata_audit';
      await ctx.database
        .insertInto(tableName)
        .values({ userId: v4(), key: UserMetadataKey.Onboarding, deletedAt: deletedLongAgo })
        .execute();

      await assertTableCount(ctx.database, tableName, 1);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();
      await assertTableCount(ctx.database, tableName, 0);
    });

    it('should skip recent records', async () => {
      const { sut, ctx } = setup();

      // 25 days old — inside the retention window, must survive cleanup.
      const keep = {
        id: v4(),
        assetId: v4(),
        ownerId: v4(),
        deletedAt: DateTime.now().minus({ days: 25 }).toISO(),
      };

      // 35 days old — outside the window, must be removed.
      const remove = {
        id: v4(),
        assetId: v4(),
        ownerId: v4(),
        deletedAt: DateTime.now().minus({ days: 35 }).toISO(),
      };

      await ctx.database.insertInto('asset_audit').values([keep, remove]).execute();
      await assertTableCount(ctx.database, 'asset_audit', 2);
      await expect(sut.onAuditTableCleanup()).resolves.toBeUndefined();

      const after = await ctx.database.selectFrom('asset_audit').select(['id']).execute();
      expect(after).toHaveLength(1);
      expect(after[0].id).toBe(keep.id);
    });
  });
});
|
||||
145
server/test/medium/specs/services/tag.service.spec.ts
Normal file
145
server/test/medium/specs/services/tag.service.spec.ts
Normal file
|
|
@ -0,0 +1,145 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { JobStatus } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { TagRepository } from 'src/repositories/tag.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { TagService } from 'src/services/tag.service';
|
||||
import { upsertTags } from 'src/utils/tag';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared fixtures for the TagService medium tests.
let defaultDatabase: Kysely<DB>;

// Builds a TagService with real asset/tag/access repositories, mocking
// events and logging.
const setup = (db?: Kysely<DB>) => {
  return newMediumService(TagService, {
    database: db || defaultDatabase,
    real: [AssetRepository, TagRepository, AccessRepository],
    mock: [EventRepository, LoggingRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// Medium tests for TagService against a real database (see setup above).
describe(TagService.name, () => {
  describe('addAssets', () => {
    it('should lock exif column', async () => {
      const { sut, ctx } = setup();
      // addAssets emits an event; stub emit so no real handlers run.
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const [tag] = await upsertTags(ctx.get(TagRepository), { userId: user.id, tags: ['tag-1'] });
      const authDto = factory.auth({ user });

      await sut.addAssets(authDto, tag.id, { ids: [asset.id] });
      // Tagging an asset should write the tag value into asset_exif and record
      // 'tags' in lockedProperties, per the row asserted here.
      await expect(
        ctx.database
          .selectFrom('asset_exif')
          .select(['lockedProperties', 'tags'])
          .where('assetId', '=', asset.id)
          .executeTakeFirstOrThrow(),
      ).resolves.toEqual({
        lockedProperties: ['tags'],
        tags: ['tag-1'],
      });
      // The tag itself still exists and is linked to the asset.
      await expect(ctx.get(TagRepository).getByValue(user.id, 'tag-1')).resolves.toEqual(
        expect.objectContaining({ id: tag.id }),
      );
      await expect(ctx.get(TagRepository).getAssetIds(tag.id, [asset.id])).resolves.toContain(asset.id);
    });
  });
  describe('deleteEmptyTags', () => {
    it('single tag exists, not connected to any assets, and is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const tagRepo = ctx.get(TagRepository);
      const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });

      // Tag exists before cleanup, is gone after: cleanup removes unused tags.
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toBeUndefined();
    });

    it('single tag exists, connected to one asset, and is not deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [tag] = await upsertTags(tagRepo, { userId: user.id, tags: ['tag-1'] });

      // Link the tag to an asset so cleanup must keep it.
      await ctx.newTagAsset({ tagIds: [tag.id], assetIds: [asset.id] });

      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'tag-1')).resolves.toEqual(expect.objectContaining({ id: tag.id }));
    });

    it('hierarchical tag exists, and the parent is connected to an asset, and the child is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      // 'parent/child' creates a two-level hierarchy.
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });

      // Only the parent is attached to an asset.
      await ctx.newTagAsset({ tagIds: [parentTag.id], assetIds: [asset.id] });

      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      // Parent survives (in use); unused child is removed.
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
    });

    it('hierarchical tag exists, and only the child is connected to an asset, and nothing is deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const { asset } = await ctx.newAsset({ ownerId: user.id });
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });

      // Only the child is attached; the parent must be kept because it has a
      // non-empty descendant.
      await ctx.newTagAsset({ tagIds: [childTag.id], assetIds: [asset.id] });

      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
    });

    it('hierarchical tag exists, and neither parent nor child is connected to an asset, and both are deleted', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const tagRepo = ctx.get(TagRepository);
      const [parentTag, childTag] = await upsertTags(tagRepo, { userId: user.id, tags: ['parent', 'parent/child'] });

      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toEqual(
        expect.objectContaining({ id: parentTag.id }),
      );
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toEqual(
        expect.objectContaining({ id: childTag.id }),
      );
      await expect(sut.handleTagCleanup()).resolves.toBe(JobStatus.Success);
      // With no assets anywhere in the hierarchy, both levels are cleaned up.
      await expect(tagRepo.getByValue(user.id, 'parent/child')).resolves.toBeUndefined();
      await expect(tagRepo.getByValue(user.id, 'parent')).resolves.toBeUndefined();
    });
  });
});
|
||||
209
server/test/medium/specs/services/timeline.service.spec.ts
Normal file
209
server/test/medium/specs/services/timeline.service.spec.ts
Normal file
|
|
@ -0,0 +1,209 @@
|
|||
import { BadRequestException } from '@nestjs/common';
|
||||
import { Kysely } from 'kysely';
|
||||
import { AssetVisibility } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { TimelineService } from 'src/services/timeline.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(TimelineService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [AssetRepository, AccessRepository, PartnerRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(TimelineService.name, () => {
|
||||
describe('getTimeBuckets', () => {
|
||||
it('should get time buckets by month', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
const dates = [new Date('1970-01-01'), new Date('1970-02-10'), new Date('1970-02-11'), new Date('1970-02-11')];
|
||||
for (const localDateTime of dates) {
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id, localDateTime });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
}
|
||||
|
||||
const response = sut.getTimeBuckets(auth, {});
|
||||
await expect(response).resolves.toEqual([
|
||||
{ count: 3, timeBucket: '1970-02-01' },
|
||||
{ count: 1, timeBucket: '1970-01-01' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and archived', async () => {
|
||||
const { sut } = setup();
|
||||
const auth = factory.auth();
|
||||
const response1 = sut.getTimeBuckets(auth, { withPartners: true, visibility: AssetVisibility.Archive });
|
||||
await expect(response1).rejects.toBeInstanceOf(BadRequestException);
|
||||
await expect(response1).rejects.toThrow(
|
||||
'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
|
||||
);
|
||||
|
||||
const response2 = sut.getTimeBuckets(auth, { withPartners: true });
|
||||
await expect(response2).rejects.toBeInstanceOf(BadRequestException);
|
||||
await expect(response2).rejects.toThrow(
|
||||
'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and favorite', async () => {
|
||||
const { sut } = setup();
|
||||
const auth = factory.auth();
|
||||
const response1 = sut.getTimeBuckets(auth, { withPartners: true, isFavorite: false });
|
||||
await expect(response1).rejects.toBeInstanceOf(BadRequestException);
|
||||
await expect(response1).rejects.toThrow(
|
||||
'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
|
||||
);
|
||||
|
||||
const response2 = sut.getTimeBuckets(auth, { withPartners: true, isFavorite: true });
|
||||
await expect(response2).rejects.toBeInstanceOf(BadRequestException);
|
||||
await expect(response2).rejects.toThrow(
|
||||
'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
|
||||
);
|
||||
});
|
||||
|
||||
it('should return error if time bucket is requested with partners asset and trash', async () => {
|
||||
const { sut } = setup();
|
||||
const auth = factory.auth();
|
||||
const response = sut.getTimeBuckets(auth, { withPartners: true, isTrashed: true });
|
||||
await expect(response).rejects.toBeInstanceOf(BadRequestException);
|
||||
await expect(response).rejects.toThrow(
|
||||
'withPartners is only supported for non-archived, non-trashed, non-favorited assets',
|
||||
);
|
||||
});
|
||||
|
||||
it('should not allow access for unrelated shared links', async () => {
|
||||
const { sut } = setup();
|
||||
const auth = factory.auth({ sharedLink: {} });
|
||||
const response = sut.getTimeBuckets(auth, {});
|
||||
await expect(response).rejects.toBeInstanceOf(BadRequestException);
|
||||
await expect(response).rejects.toThrow('Not found or no timeline.read access');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getTimeBucket', () => {
|
||||
it('should return time bucket', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({
|
||||
ownerId: user.id,
|
||||
localDateTime: new Date('1970-02-12'),
|
||||
deletedAt: new Date(),
|
||||
});
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const rawResponse = await sut.getTimeBucket(auth, { timeBucket: '1970-02-01', isTrashed: true });
|
||||
const response = JSON.parse(rawResponse);
|
||||
expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
|
||||
});
|
||||
|
||||
it('should handle a bucket without any assets', async () => {
|
||||
const { sut } = setup();
|
||||
const rawResponse = await sut.getTimeBucket(factory.auth(), { timeBucket: '1970-02-01' });
|
||||
const response = JSON.parse(rawResponse);
|
||||
expect(response).toEqual({
|
||||
city: [],
|
||||
country: [],
|
||||
duration: [],
|
||||
id: [],
|
||||
visibility: [],
|
||||
isFavorite: [],
|
||||
isImage: [],
|
||||
isTrashed: [],
|
||||
livePhotoVideoId: [],
|
||||
fileCreatedAt: [],
|
||||
localOffsetHours: [],
|
||||
ownerId: [],
|
||||
projectionType: [],
|
||||
ratio: [],
|
||||
status: [],
|
||||
thumbhash: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle 5 digit years', async () => {
|
||||
const { sut } = setup();
|
||||
const rawResponse = await sut.getTimeBucket(factory.auth(), { timeBucket: '012345-01-01' });
|
||||
const response = JSON.parse(rawResponse);
|
||||
expect(response).toEqual(expect.objectContaining({ id: [] }));
|
||||
});
|
||||
|
||||
it('should return time bucket in trash', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({
|
||||
ownerId: user.id,
|
||||
localDateTime: new Date('1970-02-12'),
|
||||
deletedAt: new Date(),
|
||||
});
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
const auth = factory.auth({ user: { id: user.id } });
|
||||
const rawResponse = await sut.getTimeBucket(auth, { timeBucket: '1970-02-01', isTrashed: true });
|
||||
const response = JSON.parse(rawResponse);
|
||||
expect(response).toEqual(expect.objectContaining({ isTrashed: [true] }));
|
||||
});
|
||||
|
||||
it('should return false for favorite status unless asset owner', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const [{ asset: asset1 }, { asset: asset2 }] = await Promise.all([
|
||||
ctx.newUser().then(async ({ user }) => {
|
||||
const result = await ctx.newAsset({
|
||||
ownerId: user.id,
|
||||
fileCreatedAt: new Date('1970-02-12'),
|
||||
localDateTime: new Date('1970-02-12'),
|
||||
isFavorite: true,
|
||||
});
|
||||
await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
|
||||
return result;
|
||||
}),
|
||||
ctx.newUser().then(async ({ user }) => {
|
||||
const result = await ctx.newAsset({
|
||||
ownerId: user.id,
|
||||
fileCreatedAt: new Date('1970-02-13'),
|
||||
localDateTime: new Date('1970-02-13'),
|
||||
isFavorite: true,
|
||||
});
|
||||
await ctx.newExif({ assetId: result.asset.id, make: 'Canon' });
|
||||
return result;
|
||||
}),
|
||||
]);
|
||||
|
||||
await Promise.all([
|
||||
ctx.newPartner({ sharedById: asset1.ownerId, sharedWithId: asset2.ownerId }),
|
||||
ctx.newPartner({ sharedById: asset2.ownerId, sharedWithId: asset1.ownerId }),
|
||||
]);
|
||||
|
||||
const auth1 = factory.auth({ user: { id: asset1.ownerId } });
|
||||
const rawResponse1 = await sut.getTimeBucket(auth1, {
|
||||
timeBucket: '1970-02-01',
|
||||
withPartners: true,
|
||||
visibility: AssetVisibility.Timeline,
|
||||
});
|
||||
const response1 = JSON.parse(rawResponse1);
|
||||
expect(response1).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [false, true] }));
|
||||
|
||||
const auth2 = factory.auth({ user: { id: asset2.ownerId } });
|
||||
const rawResponse2 = await sut.getTimeBucket(auth2, {
|
||||
timeBucket: '1970-02-01',
|
||||
withPartners: true,
|
||||
visibility: AssetVisibility.Timeline,
|
||||
});
|
||||
const response2 = JSON.parse(rawResponse2);
|
||||
expect(response2).toEqual(expect.objectContaining({ id: [asset2.id, asset1.id], isFavorite: [true, false] }));
|
||||
});
|
||||
});
|
||||
});
|
||||
181
server/test/medium/specs/services/user.service.spec.ts
Normal file
181
server/test/medium/specs/services/user.service.spec.ts
Normal file
|
|
@ -0,0 +1,181 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { ImmichEnvironment, JobName, JobStatus } from 'src/enum';
|
||||
import { ConfigRepository } from 'src/repositories/config.repository';
|
||||
import { CryptoRepository } from 'src/repositories/crypto.repository';
|
||||
import { EventRepository } from 'src/repositories/event.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { UserService } from 'src/services/user.service';
|
||||
import { mediumFactory, newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
process.env.IMMICH_ENV = ImmichEnvironment.Testing;
|
||||
|
||||
return newMediumService(UserService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [CryptoRepository, ConfigRepository, SystemMetadataRepository, UserRepository],
|
||||
mock: [LoggingRepository, JobRepository, EventRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
const { ctx } = setup();
|
||||
await ctx.newUser({ isAdmin: true, email: 'admin@immich.cloud' });
|
||||
});
|
||||
|
||||
// Medium tests for UserService against a real database (see setup above).
describe(UserService.name, () => {
  describe('create', () => {
    it('should create a user', async () => {
      const { sut, ctx } = setup();
      // createUser emits an event; stub emit so no real handlers run.
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const user = mediumFactory.userInsert();
      await expect(sut.createUser({ name: user.name, email: user.email })).resolves.toEqual(
        expect.objectContaining({ name: user.name, email: user.email }),
      );
    });

    it('should reject user with duplicate email', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const user = mediumFactory.userInsert();
      // First creation succeeds; the second with the same email throws.
      await expect(sut.createUser({ email: user.email })).resolves.toMatchObject({ email: user.email });
      await expect(sut.createUser({ email: user.email })).rejects.toThrow('User exists');
    });

    it('should not return password', async () => {
      const { sut, ctx } = setup();
      ctx.getMock(EventRepository).emit.mockResolvedValue();
      const dto = mediumFactory.userInsert({ password: 'password' });
      // The response DTO must not leak the password field.
      const user = await sut.createUser({ email: dto.email, password: 'password' });
      expect((user as any).password).toBeUndefined();
    });
  });

  describe('search', () => {
    it('should get users', async () => {
      const { sut, ctx } = setup();
      const { user: user1 } = await ctx.newUser();
      const { user: user2 } = await ctx.newUser();
      const auth = factory.auth({ user: user1 });

      // Both newly created users appear in the search results.
      await expect(sut.search(auth)).resolves.toEqual(
        expect.arrayContaining([
          expect.objectContaining({ email: user1.email }),
          expect.objectContaining({ email: user2.email }),
        ]),
      );
    });
  });

  describe('get', () => {
    it('should get a user', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();

      await expect(sut.get(user.id)).resolves.toEqual(
        expect.objectContaining({
          id: user.id,
          name: user.name,
          email: user.email,
        }),
      );
    });

    it('should not return password', async () => {
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      // get() must not leak the password field either.
      const result = await sut.get(user.id);

      expect((result as any).password).toBeUndefined();
    });
  });

  describe('updateMe', () => {
    it('should update a user', async () => {
      const { sut, ctx } = setup();
      const { user, result: before } = await ctx.newUser();
      const auth = factory.auth({ user: { id: user.id } });
      const after = await sut.updateMe(auth, { name: `${before.name} Updated` });

      // updatedAt must change as a result of the update.
      expect(before.updatedAt).toBeDefined();
      expect(after.updatedAt).toBeDefined();
      expect(before.updatedAt).not.toEqual(after.updatedAt);
    });
  });

  describe('setLicense', () => {
    it('should set a license', async () => {
      // Fixture license/activation keys used only by this test.
      const license = {
        licenseKey: 'IMCL-FF69-TUK1-RWZU-V9Q8-QGQS-S5GC-X4R2-UFK4',
        activationKey:
          'KuX8KsktrBSiXpQMAH0zLgA5SpijXVr_PDkzLdWUlAogCTMBZ0I3KCHXK0eE9EEd7harxup8_EHMeqAWeHo5VQzol6LGECpFv585U9asXD4Zc-UXt3mhJr2uhazqipBIBwJA2YhmUCDy8hiyiGsukDQNu9Rg9C77UeoKuZBWVjWUBWG0mc1iRqfvF0faVM20w53czAzlhaMxzVGc3Oimbd7xi_CAMSujF_2y8QpA3X2fOVkQkzdcH9lV0COejl7IyH27zQQ9HrlrXv3Lai5Hw67kNkaSjmunVBxC5PS0TpKoc9SfBJMaAGWnaDbjhjYUrm-8nIDQnoeEAidDXVAdPw',
      };
      const { sut, ctx } = setup();
      const { user } = await ctx.newUser();
      const auth = factory.auth({ user: { id: user.id } });
      // No license yet: getLicense rejects until one is set.
      await expect(sut.getLicense(auth)).rejects.toThrowError();
      const after = await sut.setLicense(auth, license);
      expect(after.licenseKey).toEqual(license.licenseKey);
      expect(after.activationKey).toEqual(license.activationKey);
      const getResponse = await sut.getLicense(auth);
      expect(getResponse).toEqual(after);
    });
  });

  describe.sequential('handleUserDeleteCheck', () => {
    beforeEach(async () => {
      const { sut } = setup();
      // These tests specifically have to be sequential otherwise we hit race conditions with config changes applying in incorrect tests
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 7;
      await sut.updateConfig(config);
    });

    it('should work when there are no deleted users', async () => {
      const { sut, ctx } = setup();
      const jobMock = ctx.getMock(JobRepository);
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      // Nothing to delete: the job queue receives an empty batch.
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });

    it('should work when there is a user to delete', async () => {
      // Fresh database so users from other tests don't affect the batch.
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // Deleted 60 days ago — well past the 7-day delay set in beforeEach.
      const { user } = await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 60 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([{ name: JobName.UserDelete, data: { id: user.id } }]);
    });

    it('should skip a recently deleted user', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // Deleted 5 days ago — still inside the 7-day delay, so not queued.
      await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 5 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });

    it('should respect a custom user delete delay', async () => {
      const { sut, ctx } = setup(await getKyselyDB());
      const jobMock = ctx.getMock(JobRepository);
      // Deleted 25 days ago — past the default 7, but under the custom 30 set below.
      await ctx.newUser({ deletedAt: DateTime.now().minus({ days: 25 }).toJSDate() });
      jobMock.queueAll.mockResolvedValue(void 0);
      const config = await sut.getConfig({ withCache: false });
      config.user.deleteDelay = 30;
      await sut.updateConfig(config);
      await expect(sut.handleUserDeleteCheck()).resolves.toEqual(JobStatus.Success);
      expect(jobMock.queueAll).toHaveBeenCalledExactlyOnceWith([]);
    });
  });
});
|
||||
70
server/test/medium/specs/services/version.service.spec.ts
Normal file
70
server/test/medium/specs/services/version.service.spec.ts
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { serverVersion } from 'src/constants';
|
||||
import { JobName } from 'src/enum';
|
||||
import { DatabaseRepository } from 'src/repositories/database.repository';
|
||||
import { JobRepository } from 'src/repositories/job.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { VersionHistoryRepository } from 'src/repositories/version-history.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { VersionService } from 'src/services/version.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(VersionService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [DatabaseRepository, VersionHistoryRepository],
|
||||
mock: [LoggingRepository, JobRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(VersionService.name, () => {
|
||||
describe('onBootstrap', () => {
|
||||
it('record the current version on startup', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const versionHistoryRepo = ctx.get(VersionHistoryRepository);
|
||||
|
||||
const itemsBefore = await versionHistoryRepo.getAll();
|
||||
expect(itemsBefore).toHaveLength(0);
|
||||
|
||||
await sut.onBootstrap();
|
||||
|
||||
const itemsAfter = await versionHistoryRepo.getAll();
|
||||
expect(itemsAfter).toHaveLength(1);
|
||||
expect(itemsAfter[0]).toEqual({
|
||||
createdAt: expect.any(Date),
|
||||
id: expect.any(String),
|
||||
version: serverVersion.toString(),
|
||||
});
|
||||
});
|
||||
|
||||
it('should queue memory creation when upgrading from 1.128.0', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const jobMock = ctx.getMock(JobRepository);
|
||||
const versionHistoryRepo = ctx.get(VersionHistoryRepository);
|
||||
jobMock.queue.mockResolvedValue(void 0);
|
||||
|
||||
await versionHistoryRepo.create({ version: 'v1.128.0' });
|
||||
await sut.onBootstrap();
|
||||
|
||||
expect(jobMock.queue).toHaveBeenCalledWith({ name: JobName.MemoryGenerate });
|
||||
});
|
||||
|
||||
it('should not queue memory creation when upgrading from 1.129.0', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const jobMock = ctx.getMock(JobRepository);
|
||||
const versionHistoryRepo = ctx.get(VersionHistoryRepository);
|
||||
|
||||
await versionHistoryRepo.create({ version: 'v1.129.0' });
|
||||
await sut.onBootstrap();
|
||||
|
||||
expect(jobMock.queue).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
776
server/test/medium/specs/services/workflow.service.spec.ts
Normal file
776
server/test/medium/specs/services/workflow.service.spec.ts
Normal file
|
|
@ -0,0 +1,776 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { PluginContext, PluginTriggerType } from 'src/enum';
|
||||
import { AccessRepository } from 'src/repositories/access.repository';
|
||||
import { LoggingRepository } from 'src/repositories/logging.repository';
|
||||
import { PluginRepository } from 'src/repositories/plugin.repository';
|
||||
import { WorkflowRepository } from 'src/repositories/workflow.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { WorkflowService } from 'src/services/workflow.service';
|
||||
import { newMediumService } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = (db?: Kysely<DB>) => {
|
||||
return newMediumService(WorkflowService, {
|
||||
database: db || defaultDatabase,
|
||||
real: [WorkflowRepository, PluginRepository, AccessRepository],
|
||||
mock: [LoggingRepository],
|
||||
});
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(WorkflowService.name, () => {
|
||||
let testPluginId: string;
|
||||
let testFilterId: string;
|
||||
let testActionId: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
// Create a test plugin with filters and actions once for all tests
|
||||
const pluginRepo = new PluginRepository(defaultDatabase);
|
||||
const result = await pluginRepo.loadPlugin(
|
||||
{
|
||||
name: 'test-core-plugin',
|
||||
title: 'Test Core Plugin',
|
||||
description: 'A test core plugin for workflow tests',
|
||||
author: 'Test Author',
|
||||
version: '1.0.0',
|
||||
wasm: {
|
||||
path: '/test/path.wasm',
|
||||
},
|
||||
filters: [
|
||||
{
|
||||
methodName: 'test-filter',
|
||||
title: 'Test Filter',
|
||||
description: 'A test filter',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
methodName: 'test-action',
|
||||
title: 'Test Action',
|
||||
description: 'A test action',
|
||||
supportedContexts: [PluginContext.Asset],
|
||||
schema: undefined,
|
||||
},
|
||||
],
|
||||
},
|
||||
'/plugins/test-core-plugin',
|
||||
);
|
||||
|
||||
testPluginId = result.plugin.id;
|
||||
testFilterId = result.filters[0].id;
|
||||
testActionId = result.actions[0].id;
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await defaultDatabase.deleteFrom('plugin').where('id', '=', testPluginId).execute();
|
||||
});
|
||||
|
||||
describe('create', () => {
|
||||
it('should create a workflow without filters or actions', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'A test workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
|
||||
expect(workflow).toMatchObject({
|
||||
id: expect.any(String),
|
||||
ownerId: user.id,
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow',
|
||||
description: 'A test workflow',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [],
|
||||
});
|
||||
});
|
||||
|
||||
it('should create a workflow with filters and actions', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
const workflow = await sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow-with-relations',
|
||||
description: 'A test workflow with filters and actions',
|
||||
enabled: true,
|
||||
filters: [
|
||||
{
|
||||
pluginFilterId: testFilterId,
|
||||
filterConfig: { key: 'value' },
|
||||
},
|
||||
],
|
||||
actions: [
|
||||
{
|
||||
pluginActionId: testActionId,
|
||||
actionConfig: { action: 'test' },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
expect(workflow).toMatchObject({
|
||||
id: expect.any(String),
|
||||
ownerId: user.id,
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'test-workflow-with-relations',
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
expect(workflow.filters).toHaveLength(1);
|
||||
expect(workflow.filters[0]).toMatchObject({
|
||||
id: expect.any(String),
|
||||
workflowId: workflow.id,
|
||||
pluginFilterId: testFilterId,
|
||||
filterConfig: { key: 'value' },
|
||||
order: 0,
|
||||
});
|
||||
|
||||
expect(workflow.actions).toHaveLength(1);
|
||||
expect(workflow.actions[0]).toMatchObject({
|
||||
id: expect.any(String),
|
||||
workflowId: workflow.id,
|
||||
pluginActionId: testActionId,
|
||||
actionConfig: { action: 'test' },
|
||||
order: 0,
|
||||
});
|
||||
});
|
||||
|
||||
it('should throw error when creating workflow with invalid filter', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(
|
||||
sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'invalid-workflow',
|
||||
description: 'A workflow with invalid filter',
|
||||
enabled: true,
|
||||
filters: [{ pluginFilterId: factory.uuid(), filterConfig: { key: 'value' } }],
|
||||
actions: [],
|
||||
}),
|
||||
).rejects.toThrow('Invalid filter ID');
|
||||
});
|
||||
|
||||
it('should throw error when creating workflow with invalid action', async () => {
|
||||
const { sut, ctx } = setup();
|
||||
const { user } = await ctx.newUser();
|
||||
const auth = factory.auth({ user });
|
||||
|
||||
await expect(
|
||||
sut.create(auth, {
|
||||
triggerType: PluginTriggerType.AssetCreate,
|
||||
name: 'invalid-workflow',
|
||||
description: 'A workflow with invalid action',
|
||||
enabled: true,
|
||||
filters: [],
|
||||
actions: [{ pluginActionId: factory.uuid(), actionConfig: { action: 'test' } }],
|
||||
}),
|
||||
).rejects.toThrow('Invalid action ID');
|
||||
});
|
||||
|
||||
it('should throw error when filter does not support trigger context', async () => {
  const { sut, ctx } = setup();
  const { user } = await ctx.newUser();
  const auth = factory.auth({ user });

  // Create a plugin with a filter that only supports Album context
  const pluginRepo = new PluginRepository(defaultDatabase);
  const result = await pluginRepo.loadPlugin(
    {
      name: 'album-only-plugin',
      title: 'Album Only Plugin',
      description: 'Plugin with album-only filter',
      author: 'Test Author',
      version: '1.0.0',
      wasm: { path: '/test/album-plugin.wasm' },
      filters: [
        {
          methodName: 'album-filter',
          title: 'Album Filter',
          description: 'A filter that only works with albums',
          supportedContexts: [PluginContext.Album],
          schema: undefined,
        },
      ],
    },
    '/plugins/test-core-plugin',
  );

  // AssetCreate triggers run in asset context, which the album-only filter
  // does not declare, so the workflow must be rejected.
  await expect(
    sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'invalid-context-workflow',
      description: 'A workflow with context mismatch',
      enabled: true,
      filters: [{ pluginFilterId: result.filters[0].id }],
      actions: [],
    }),
  ).rejects.toThrow('does not support asset context');
});
|
||||
|
||||
it('should throw error when action does not support trigger context', async () => {
  const { sut, ctx } = setup();
  const { user } = await ctx.newUser();
  const auth = factory.auth({ user });

  // Create a plugin with an action that only supports Person context
  const pluginRepo = new PluginRepository(defaultDatabase);
  const result = await pluginRepo.loadPlugin(
    {
      name: 'person-only-plugin',
      title: 'Person Only Plugin',
      description: 'Plugin with person-only action',
      author: 'Test Author',
      version: '1.0.0',
      wasm: { path: '/test/person-plugin.wasm' },
      actions: [
        {
          methodName: 'person-action',
          title: 'Person Action',
          description: 'An action that only works with persons',
          supportedContexts: [PluginContext.Person],
          schema: undefined,
        },
      ],
    },
    '/plugins/test-core-plugin',
  );

  // AssetCreate triggers run in asset context; the person-only action must
  // cause workflow creation to fail.
  await expect(
    sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'invalid-context-workflow',
      description: 'A workflow with context mismatch',
      enabled: true,
      filters: [],
      actions: [{ pluginActionId: result.actions[0].id }],
    }),
  ).rejects.toThrow('does not support asset context');
});
|
||||
|
||||
it('should create workflow with multiple filters and actions in correct order', async () => {
  const { sut, ctx } = setup();
  const { user } = await ctx.newUser();
  const auth = factory.auth({ user });

  const workflow = await sut.create(auth, {
    triggerType: PluginTriggerType.AssetCreate,
    name: 'multi-step-workflow',
    description: 'A workflow with multiple filters and actions',
    enabled: true,
    filters: [
      { pluginFilterId: testFilterId, filterConfig: { step: 1 } },
      { pluginFilterId: testFilterId, filterConfig: { step: 2 } },
    ],
    actions: [
      { pluginActionId: testActionId, actionConfig: { step: 1 } },
      { pluginActionId: testActionId, actionConfig: { step: 2 } },
      { pluginActionId: testActionId, actionConfig: { step: 3 } },
    ],
  });

  // The order field should follow the array position in the create dto.
  expect(workflow.filters).toHaveLength(2);
  expect(workflow.filters[0].order).toBe(0);
  expect(workflow.filters[0].filterConfig).toEqual({ step: 1 });
  expect(workflow.filters[1].order).toBe(1);
  expect(workflow.filters[1].filterConfig).toEqual({ step: 2 });

  expect(workflow.actions).toHaveLength(3);
  expect(workflow.actions[0].order).toBe(0);
  expect(workflow.actions[1].order).toBe(1);
  expect(workflow.actions[2].order).toBe(2);
});
|
||||
});
|
||||
|
||||
// Listing workflows: a user sees all of their own workflows and nothing else.
describe('getAll', () => {
  it('should return all workflows for a user', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const workflow1 = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'workflow-1',
      description: 'First workflow',
      enabled: true,
      filters: [],
      actions: [],
    });

    const workflow2 = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'workflow-2',
      description: 'Second workflow',
      enabled: false,
      filters: [],
      actions: [],
    });

    const workflows = await sut.getAll(auth);

    // Order is not asserted, only membership.
    expect(workflows).toHaveLength(2);
    expect(workflows).toEqual(
      expect.arrayContaining([
        expect.objectContaining({ id: workflow1.id, name: 'workflow-1' }),
        expect.objectContaining({ id: workflow2.id, name: 'workflow-2' }),
      ]),
    );
  });

  it('should return empty array when user has no workflows', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const workflows = await sut.getAll(auth);

    expect(workflows).toEqual([]);
  });

  it('should not return workflows from other users', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });

    await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'user1-workflow',
      description: 'User 1 workflow',
      enabled: true,
      filters: [],
      actions: [],
    });

    // user2 must not see user1's workflow.
    const user2Workflows = await sut.getAll(auth2);

    expect(user2Workflows).toEqual([]);
  });
});
|
||||
|
||||
// Fetching a single workflow by id, including access control.
describe('get', () => {
  it('should return a specific workflow by id', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'A test workflow',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
      actions: [{ pluginActionId: testActionId, actionConfig: { action: 'test' } }],
    });

    const workflow = await sut.get(auth, created.id);

    expect(workflow).toMatchObject({
      id: created.id,
      name: 'test-workflow',
      description: 'A test workflow',
      enabled: true,
    });
    // Nested filters/actions should be hydrated on a single-workflow fetch.
    expect(workflow.filters).toHaveLength(1);
    expect(workflow.actions).toHaveLength(1);
  });

  it('should throw error when workflow does not exist', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    // Arbitrary well-formed UUID that was never created.
    await expect(sut.get(auth, '66da82df-e424-4bf4-b6f3-5d8e71620dae')).rejects.toThrow();
  });

  it('should throw error when user does not have access to workflow', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });

    const workflow = await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'private-workflow',
      description: 'Private workflow',
      enabled: true,
      filters: [],
      actions: [],
    });

    await expect(sut.get(auth2, workflow.id)).rejects.toThrow();
  });
});
|
||||
|
||||
// Updating a workflow: scalar fields, filter/action replacement semantics,
// validation of referenced plugin ids, and access control.
describe('update', () => {
  it('should update workflow basic fields', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'original-workflow',
      description: 'Original description',
      enabled: true,
      filters: [],
      actions: [],
    });

    const updated = await sut.update(auth, created.id, {
      name: 'updated-workflow',
      description: 'Updated description',
      enabled: false,
    });

    expect(updated).toMatchObject({
      id: created.id,
      name: 'updated-workflow',
      description: 'Updated description',
      enabled: false,
    });
  });

  it('should update workflow filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { old: 'config' } }],
      actions: [],
    });

    // The submitted filter list replaces the previous one wholesale.
    const updated = await sut.update(auth, created.id, {
      filters: [
        { pluginFilterId: testFilterId, filterConfig: { new: 'config' } },
        { pluginFilterId: testFilterId, filterConfig: { second: 'filter' } },
      ],
    });

    expect(updated.filters).toHaveLength(2);
    expect(updated.filters[0].filterConfig).toEqual({ new: 'config' });
    expect(updated.filters[1].filterConfig).toEqual({ second: 'filter' });
  });

  it('should update workflow actions', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [{ pluginActionId: testActionId, actionConfig: { old: 'config' } }],
    });

    // As with filters, actions are replaced, not merged.
    const updated = await sut.update(auth, created.id, {
      actions: [
        { pluginActionId: testActionId, actionConfig: { new: 'config' } },
        { pluginActionId: testActionId, actionConfig: { second: 'action' } },
      ],
    });

    expect(updated.actions).toHaveLength(2);
    expect(updated.actions[0].actionConfig).toEqual({ new: 'config' });
    expect(updated.actions[1].actionConfig).toEqual({ second: 'action' });
  });

  it('should clear filters when updated with empty array', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { key: 'value' } }],
      actions: [],
    });

    const updated = await sut.update(auth, created.id, {
      filters: [],
    });

    expect(updated.filters).toHaveLength(0);
  });

  it('should throw error when no fields to update', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });

    // An empty patch is an error rather than a no-op.
    await expect(sut.update(auth, created.id, {})).rejects.toThrow('No fields to update');
  });

  it('should throw error when updating non-existent workflow', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    await expect(sut.update(auth, factory.uuid(), { name: 'updated-name' })).rejects.toThrow();
  });

  it('should throw error when user does not have access to update workflow', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });

    const workflow = await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'private-workflow',
      description: 'Private',
      enabled: true,
      filters: [],
      actions: [],
    });

    await expect(
      sut.update(auth2, workflow.id, {
        name: 'hacked-workflow',
      }),
    ).rejects.toThrow();
  });

  it('should throw error when updating with invalid filter', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });

    // Unknown plugin filter id must be rejected on update as well as create.
    await expect(
      sut.update(auth, created.id, {
        filters: [{ pluginFilterId: factory.uuid(), filterConfig: {} }],
      }),
    ).rejects.toThrow();
  });

  it('should throw error when updating with invalid action', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });

    await expect(
      sut.update(auth, created.id, { actions: [{ pluginActionId: factory.uuid(), actionConfig: {} }] }),
    ).rejects.toThrow();
  });

  it('should update trigger type', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.PersonRecognized,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });

    await sut.update(auth, created.id, {
      triggerType: PluginTriggerType.AssetCreate,
    });

    // Re-fetch to verify the change was persisted, not just echoed back.
    const fetched = await sut.get(auth, created.id);
    expect(fetched.triggerType).toBe(PluginTriggerType.AssetCreate);
  });

  it('should add filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });

    await sut.update(auth, created.id, {
      filters: [
        { pluginFilterId: testFilterId, filterConfig: { first: true } },
        { pluginFilterId: testFilterId, filterConfig: { second: true } },
      ],
    });

    const fetched = await sut.get(auth, created.id);
    expect(fetched.filters).toHaveLength(2);
    expect(fetched.filters[0].filterConfig).toEqual({ first: true });
    expect(fetched.filters[1].filterConfig).toEqual({ second: true });
  });

  it('should replace existing filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { original: true } }],
      actions: [],
    });

    await sut.update(auth, created.id, {
      filters: [{ pluginFilterId: testFilterId, filterConfig: { replaced: true } }],
    });

    const fetched = await sut.get(auth, created.id);
    expect(fetched.filters).toHaveLength(1);
    expect(fetched.filters[0].filterConfig).toEqual({ replaced: true });
  });

  it('should remove existing filters', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const created = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: { toRemove: true } }],
      actions: [],
    });

    await sut.update(auth, created.id, {
      filters: [],
    });

    const fetched = await sut.get(auth, created.id);
    expect(fetched.filters).toHaveLength(0);
  });
});
|
||||
|
||||
// Deleting a workflow, including cascade of nested filters/actions and access control.
describe('delete', () => {
  it('should delete a workflow', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const workflow = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [],
      actions: [],
    });

    await sut.delete(auth, workflow.id);

    // A deleted workflow is indistinguishable from an inaccessible one.
    await expect(sut.get(auth, workflow.id)).rejects.toThrow('Not found or no workflow.read access');
  });

  it('should delete workflow with filters and actions', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    const workflow = await sut.create(auth, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'test-workflow',
      description: 'Test',
      enabled: true,
      filters: [{ pluginFilterId: testFilterId, filterConfig: {} }],
      actions: [{ pluginActionId: testActionId, actionConfig: {} }],
    });

    await sut.delete(auth, workflow.id);

    await expect(sut.get(auth, workflow.id)).rejects.toThrow('Not found or no workflow.read access');
  });

  it('should throw error when deleting non-existent workflow', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const auth = factory.auth({ user });

    await expect(sut.delete(auth, factory.uuid())).rejects.toThrow();
  });

  it('should throw error when user does not have access to delete workflow', async () => {
    const { sut, ctx } = setup();
    const { user: user1 } = await ctx.newUser();
    const { user: user2 } = await ctx.newUser();
    const auth1 = factory.auth({ user: user1 });
    const auth2 = factory.auth({ user: user2 });

    const workflow = await sut.create(auth1, {
      triggerType: PluginTriggerType.AssetCreate,
      name: 'private-workflow',
      description: 'Private',
      enabled: true,
      filters: [],
      actions: [],
    });

    await expect(sut.delete(auth2, workflow.id)).rejects.toThrow();
  });
});
|
||||
});
|
||||
--- new file: server/test/medium/specs/sync/sync-album-asset-exif.spec.ts (373 lines, @ -0,0 +1,373 @@) ---
|
|||
import { Kysely } from 'kysely';
|
||||
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { updateLockedColumns } from 'src/utils/database';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
// Shared database connection, initialized once in beforeAll and used by every
// test that does not supply its own Kysely instance.
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Matcher for the ack emitted on the AlbumAssetExifUpdateV1 checkpoint stream.
const updateSyncAck = {
  ack: expect.stringContaining(SyncEntityType.AlbumAssetExifUpdateV1),
  data: {},
  type: SyncEntityType.SyncAckV1,
};
|
||||
|
||||
// Matcher for the ack emitted after a AlbumAssetExifBackfillV1 batch.
const backfillSyncAck = {
  ack: expect.stringContaining(SyncEntityType.AlbumAssetExifBackfillV1),
  data: {},
  type: SyncEntityType.SyncAckV1,
};
|
||||
|
||||
beforeAll(async () => {
  // Connect once for the whole suite; individual tests share this connection.
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncRequestType.AlbumAssetExifsV1, () => {
|
||||
it('should detect and sync the first album asset exif', async () => {
  const { auth, ctx } = await setup();
  // Another user owns the asset/album; the sync user gains access via album share.
  const { user: user2 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newExif({ assetId: asset.id, make: 'Canon' });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  // Full exif payload is asserted field-by-field; only `make` was populated above.
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: {
        assetId: asset.id,
        city: null,
        country: null,
        dateTimeOriginal: null,
        description: '',
        exifImageHeight: null,
        exifImageWidth: null,
        exposureTime: null,
        fNumber: null,
        fileSizeInByte: null,
        focalLength: null,
        fps: null,
        iso: null,
        latitude: null,
        lensModel: null,
        longitude: null,
        make: 'Canon',
        model: null,
        modifyDate: null,
        orientation: null,
        profileDescription: null,
        projectionType: null,
        rating: null,
        state: null,
        timeZone: null,
      },
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
|
||||
|
||||
it('should sync album asset exif for own user', async () => {
  const { auth, ctx } = await setup();
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  await ctx.newExif({ assetId: asset.id, make: 'Canon' });
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });

  // The owner's exif is delivered on the personal asset-exif stream…
  await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
    expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  // …and also on the album stream, since the asset is in one of their albums.
  await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
    expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
    expect.objectContaining({ type: SyncEntityType.AlbumAssetExifCreateV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
});
|
||||
|
||||
it('should not sync album asset exif for unrelated user', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { user: user3 } = await ctx.newUser();
  // user3 owns the asset and is a member of user2's album; the sync user
  // (`auth`) has no relation to either.
  const { asset } = await ctx.newAsset({ ownerId: user3.id });
  await ctx.newExif({ assetId: asset.id, make: 'Canon' });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.Editor });
  const { session } = await ctx.newSession({ userId: user3.id });
  const authUser3 = factory.auth({ session, user: user3 });

  await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
    expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  // The unrelated sync user receives nothing on the album exif stream.
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
|
||||
|
||||
it('should backfill album assets exif when a user shares an album with you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
  const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
  // The wait(2) calls separate row timestamps so sync checkpoint ordering is
  // deterministic.
  const { asset: asset1User2 } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newExif({ assetId: asset1User2.id, make: 'asset1User2' });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset1User2.id });
  await wait(2);
  const { asset: asset2User2 } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newExif({ assetId: asset2User2.id, make: 'asset2User2' });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset2User2.id });
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album1.id, assetId: asset2User2.id });
  await wait(2);
  const { asset: asset3User2 } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset3User2.id });
  await ctx.newExif({ assetId: asset3User2.id, make: 'asset3User2' });
  await wait(2);
  await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // Only album1 is shared so far; only its single asset's exif is synced.
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: asset2User2.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  // ack initial album asset exif sync
  await ctx.syncAckAll(auth, response);

  // share the second album with the sync user
  await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // album2's pre-checkpoint assets should arrive as exif backfill events,
  // followed by the post-checkpoint asset as a regular create
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: asset1User2.id,
      }),
      type: SyncEntityType.AlbumAssetExifBackfillV1,
    },
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: asset2User2.id,
      }),
      type: SyncEntityType.AlbumAssetExifBackfillV1,
    },
    backfillSyncAck,
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: asset3User2.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
|
||||
|
||||
it('should sync old asset exif when a user adds them to an album they share you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset: firstAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'firstAsset' });
  await ctx.newExif({ assetId: firstAsset.id, make: 'firstAsset' });
  const { asset: secondAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'secondAsset' });
  await ctx.newExif({ assetId: secondAsset.id, make: 'secondAsset' });
  const { asset: album1Asset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'album1Asset' });
  await ctx.newExif({ assetId: album1Asset.id, make: 'album1Asset' });
  const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
  const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: firstAsset.id });
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
  await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // Initial sync only covers album1, the only shared album so far.
  const firstAlbumResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(firstAlbumResponse).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: album1Asset.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, firstAlbumResponse);

  // Sharing album2 triggers a backfill of its pre-existing asset.
  await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: firstAsset.id,
      }),
      type: SyncEntityType.AlbumAssetExifBackfillV1,
    },
    backfillSyncAck,
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  // ack initial album asset sync
  await ctx.syncAckAll(auth, response);

  await ctx.newAlbumAsset({ albumId: album2.id, assetId: secondAsset.id });
  await wait(2);

  // The newly linked asset arrives as a regular create event even though the
  // asset row itself is older than the first asset's checkpoint.
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(newResponse).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: secondAsset.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetExifsV1]);
});
|
||||
|
||||
it('should sync asset exif updates for an album shared with you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newExif({ assetId: asset.id, make: 'asset' });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: asset.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);

  // update the asset's exif directly through the repository
  const assetRepository = ctx.get(AssetRepository);
  await assetRepository.upsertExif(
    updateLockedColumns({
      assetId: asset.id,
      city: 'New City',
    }),
    { lockedPropertiesBehavior: 'append' },
  );

  // A post-checkpoint exif change is streamed as an update, not a create.
  await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: asset.id,
        city: 'New City',
      }),
      type: SyncEntityType.AlbumAssetExifUpdateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
});
|
||||
|
||||
// Scenario: an asset is added to a shared album BEFORE it has an exif row.
// When the exif appears later, it must still reach the sharee — here it
// surfaces as an update after the initial (exif-less) sync was acked.
it('should sync delayed asset exif creates for an album shared with you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset: assetWithExif } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newExif({ assetId: assetWithExif.id, make: 'assetWithExif' });
  // assetDelayedExif intentionally gets NO exif row yet.
  const { asset: assetDelayedExif } = await ctx.newAsset({ ownerId: user2.id });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  const { asset: newerAsset } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newExif({ assetId: newerAsset.id, make: 'newerAsset' });
  // waits give each album-asset link a distinct timestamp (assumed; TODO confirm)
  await ctx.newAlbumAsset({ albumId: album.id, assetId: assetWithExif.id });
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album.id, assetId: assetDelayedExif.id });
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album.id, assetId: newerAsset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // Only the two assets that already have exif rows appear in the first stream.
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1]);
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: assetWithExif.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: newerAsset.id,
      }),
      type: SyncEntityType.AlbumAssetExifCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);

  // update the asset
  const assetRepository = ctx.get(AssetRepository);
  await assetRepository.upsertExif(
    updateLockedColumns({
      assetId: assetDelayedExif.id,
      city: 'Delayed Exif',
    }),
    { lockedPropertiesBehavior: 'append' },
  );

  // The late exif row now streams for the previously exif-less asset.
  await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetExifsV1])).resolves.toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        assetId: assetDelayedExif.id,
        city: 'Delayed Exif',
      }),
      type: SyncEntityType.AlbumAssetExifUpdateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
});
|
||||
});
|
||||
312
server/test/medium/specs/sync/sync-album-asset.spec.ts
Normal file
312
server/test/medium/specs/sync/sync-album-asset.spec.ts
Normal file
|
|
@ -0,0 +1,312 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Matcher for the SyncAckV1 entry emitted by the main (update) album-asset
// checkpoint: its ack string embeds the AlbumAssetUpdateV1 entity type.
const updateSyncAck = {
  ack: expect.stringContaining(SyncEntityType.AlbumAssetUpdateV1),
  data: {},
  type: SyncEntityType.SyncAckV1,
};
|
||||
|
||||
// Matcher for the SyncAckV1 entry that closes an album-asset backfill batch:
// its ack string embeds the AlbumAssetBackfillV1 entity type.
const backfillSyncAck = {
  ack: expect.stringContaining(SyncEntityType.AlbumAssetBackfillV1),
  data: {},
  type: SyncEntityType.SyncAckV1,
};
|
||||
|
||||
// Acquire the shared test database once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncRequestType.AlbumAssetsV1, () => {
|
||||
// Scenario: verifies the full wire shape of an AlbumAssetCreateV1 payload for
// an asset in an album shared with the sync user — every field is pinned, not
// just the id, to catch accidental schema drift.
it('should detect and sync the first album asset', async () => {
  const originalFileName = 'firstAsset';
  const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
  const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
  const date = new Date().toISOString();

  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  // checksum/thumbhash are stored as bytes but expected back base64-encoded.
  const { asset } = await ctx.newAsset({
    originalFileName,
    ownerId: user2.id,
    checksum: Buffer.from(checksum, 'base64'),
    thumbhash: Buffer.from(thumbhash, 'base64'),
    fileCreatedAt: date,
    fileModifiedAt: date,
    localDateTime: date,
    deletedAt: null,
    duration: '0:10:00.00000',
    livePhotoVideoId: null,
    stackId: null,
    libraryId: null,
    width: 1920,
    height: 1080,
  });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      // exact match (no objectContaining) — asserts the complete DTO shape
      data: {
        id: asset.id,
        originalFileName,
        ownerId: asset.ownerId,
        thumbhash,
        checksum,
        deletedAt: asset.deletedAt,
        fileCreatedAt: asset.fileCreatedAt,
        fileModifiedAt: asset.fileModifiedAt,
        isFavorite: asset.isFavorite,
        localDateTime: asset.localDateTime,
        type: asset.type,
        visibility: asset.visibility,
        duration: asset.duration,
        livePhotoVideoId: asset.livePhotoVideoId,
        stackId: asset.stackId,
        libraryId: asset.libraryId,
        width: asset.width,
        height: asset.height,
        isEdited: asset.isEdited,
      },
      type: SyncEntityType.AlbumAssetCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
|
||||
|
||||
// Scenario: a user's own asset in their own album appears both in the plain
// asset stream (AssetV1) and in the album-asset stream (AlbumAssetCreateV1).
it('should sync album asset for own user', async () => {
  const { auth, ctx } = await setup();
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });

  await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
    expect.objectContaining({ type: SyncEntityType.AssetV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  await expect(ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1])).resolves.toEqual([
    expect.objectContaining({ type: SyncEntityType.SyncAckV1 }),
    expect.objectContaining({ type: SyncEntityType.AlbumAssetCreateV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
});
|
||||
|
||||
// Scenario: `auth` has no relation to the album (owned by user2, shared only
// with user3), so their AlbumAssetsV1 stream must be empty; user3 still sees
// their own asset via AssetsV1.
it('should not sync album asset for unrelated user', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { user: user3 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user3.id });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: user3.id, role: AlbumUserRole.Editor });
  const { session } = await ctx.newSession({ userId: user3.id });
  const authUser3 = factory.auth({ session, user: user3 });

  await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetsV1])).resolves.toEqual([
    expect.objectContaining({ type: SyncEntityType.AssetV1 }),
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
  // the unrelated user (`auth`) has nothing to sync for album assets
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
|
||||
|
||||
// Scenario: after the user has already synced album1's assets, sharing album2
// (whose links predate the checkpoint) must replay the older links as
// AlbumAssetBackfillV1 entries, followed by a backfill ack, while links newer
// than the checkpoint still arrive as regular creates.
it('should backfill album assets when a user shares an album with you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
  const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
  // waits give each link a distinct timestamp (assumed; TODO confirm)
  const { asset: asset1User2 } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset1User2.id });
  await wait(2);
  const { asset: asset2User2 } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset2User2.id });
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album1.id, assetId: asset2User2.id });
  await wait(2);
  const { asset: asset3User2 } = await ctx.newAsset({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: asset3User2.id });
  await wait(2);
  await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // Initially only album1 is shared, so only asset2User2 (in album1) syncs.
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: asset2User2.id,
      }),
      type: SyncEntityType.AlbumAssetCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  // ack initial album asset sync
  await ctx.syncAckAll(auth, response);

  await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // should backfill the album user
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(newResponse).toEqual([
    // pre-checkpoint links replay as backfill entries…
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: asset1User2.id,
      }),
      type: SyncEntityType.AlbumAssetBackfillV1,
    },
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: asset2User2.id,
      }),
      type: SyncEntityType.AlbumAssetBackfillV1,
    },
    backfillSyncAck,
    updateSyncAck,
    // …while the post-checkpoint link is a normal create.
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: asset3User2.id,
      }),
      type: SyncEntityType.AlbumAssetCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
|
||||
|
||||
// Scenario: an asset whose album link predates the user's checkpoint arrives
// via backfill when the album is shared; an asset linked AFTER the share still
// arrives as a regular create, even though the asset row itself is older.
it('should sync old assets when a user adds them to an album they share you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset: firstAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'firstAsset' });
  const { asset: secondAsset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'secondAsset' });
  const { asset: album1Asset } = await ctx.newAsset({ ownerId: user2.id, originalFileName: 'album1Asset' });
  const { album: album1 } = await ctx.newAlbum({ ownerId: user2.id });
  const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: firstAsset.id });
  // wait separates link timestamps (assumed; TODO confirm)
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });
  await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const firstAlbumResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(firstAlbumResponse).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: album1Asset.id,
      }),
      type: SyncEntityType.AlbumAssetCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, firstAlbumResponse);

  // sharing album2 now exposes its pre-checkpoint link as a backfill
  await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: firstAsset.id,
      }),
      type: SyncEntityType.AlbumAssetBackfillV1,
    },
    backfillSyncAck,
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  // ack initial album asset sync
  await ctx.syncAckAll(auth, response);

  await ctx.newAlbumAsset({ albumId: album2.id, assetId: secondAsset.id });
  await wait(2);

  // should backfill the new asset even though it's older than the first asset
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(newResponse).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: secondAsset.id,
      }),
      type: SyncEntityType.AlbumAssetCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumAssetsV1]);
});
|
||||
|
||||
// Scenario: after the initial create has been acked, a direct repository
// update to an asset in a shared album streams as AlbumAssetUpdateV1.
it('should sync asset updates for an album shared with you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user2.id, isFavorite: false });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  // wait separates timestamps before the album-asset link (assumed; TODO confirm)
  await wait(2);
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(response).toEqual([
    updateSyncAck,
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: asset.id,
      }),
      type: SyncEntityType.AlbumAssetCreateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);

  // update the asset
  const assetRepository = ctx.get(AssetRepository);
  await assetRepository.update({
    id: asset.id,
    isFavorite: true,
  });

  const updateResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumAssetsV1]);
  expect(updateResponse).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        id: asset.id,
        isFavorite: true,
      }),
      type: SyncEntityType.AlbumAssetUpdateV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
});
|
||||
});
|
||||
265
server/test/medium/specs/sync/sync-album-to-asset.spec.ts
Normal file
265
server/test/medium/specs/sync/sync-album-to-asset.spec.ts
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AlbumRepository } from 'src/repositories/album.repository';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Acquire the shared test database once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncRequestType.AlbumToAssetsV1, () => {
|
||||
// Scenario: the album↔asset relation itself (not the asset payload) syncs as
// AlbumToAssetV1 with exactly { albumId, assetId }.
it('should detect and sync the first album to asset relation', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user2.id });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  // note: no explicit role here — the default album-user role is used
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: the relation syncs for an album the user owns outright
// (no album-user share row needed).
it('should sync album to asset for owned albums', async () => {
  const { auth, ctx } = await setup();
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: the relation syncs when the album belongs to another user but is
// shared with the sync user (asset here is the sync user's own).
it('should detect and sync the album to asset for shared albums', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: an unshared album owned by another user yields nothing —
// the stream is immediately complete.
it('should not sync album to asset for an album owned by another user', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset } = await ctx.newAsset({ ownerId: user2.id });
  const { album } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: sharing an album whose asset links predate the user's checkpoint
// replays those links as AlbumToAssetBackfillV1 plus a backfill ack.
it('should backfill album to assets when a user shares an album with you', async () => {
  const { auth, ctx } = await setup();
  const { user: user2 } = await ctx.newUser();
  const { asset: album1Asset } = await ctx.newAsset({ ownerId: user2.id });
  const { asset: album2Asset } = await ctx.newAsset({ ownerId: auth.user.id });
  // Backfill album
  const { album: album2 } = await ctx.newAlbum({ ownerId: user2.id });
  await ctx.newAlbumAsset({ albumId: album2.id, assetId: album2Asset.id });
  // wait separates the backfill album's link timestamp (assumed; TODO confirm)
  await wait(2);
  const { album: album1 } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album1.id, assetId: album1Asset.id });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album1.id,
        assetId: album1Asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  // ack initial album to asset sync
  await ctx.syncAckAll(auth, response);

  // add user to backfill album
  await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });

  // should backfill the album to asset relation
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        albumId: album2.id,
        assetId: album2Asset.id,
      }),
      type: SyncEntityType.AlbumToAssetBackfillV1,
    },
    {
      ack: expect.stringContaining(SyncEntityType.AlbumToAssetBackfillV1),
      data: {},
      type: SyncEntityType.SyncAckV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: removing an asset from an album streams AlbumToAssetDeleteV1
// on the next sync after the create was acked.
it('should detect and sync a deleted album to asset relation', async () => {
  const { auth, ctx } = await setup();
  const albumRepo = ctx.get(AlbumRepository);
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await albumRepo.removeAssetIds(album.id, [asset.id]);
  // wait lets the delete marker land after the ack checkpoint (assumed; TODO confirm)
  await wait(2);

  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: deleting the asset itself also surfaces the relation removal
// as AlbumToAssetDeleteV1.
it('should detect and sync a deleted album to asset relation when an asset is deleted', async () => {
  const { auth, ctx } = await setup();
  const assetRepo = ctx.get(AssetRepository);
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await assetRepo.remove({ id: asset.id });
  // wait lets the delete marker land after the ack checkpoint (assumed; TODO confirm)
  await wait(2);

  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
|
||||
// Scenario: when the whole album is deleted, no per-asset relation deletes
// stream — the next sync is simply complete.
it('should not sync a deleted album to asset relation when the album is deleted', async () => {
  const { auth, ctx } = await setup();
  const albumRepo = ctx.get(AlbumRepository);
  const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  await ctx.newAlbumAsset({ albumId: album.id, assetId: asset.id });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumToAssetsV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: {
        albumId: album.id,
        assetId: asset.id,
      },
      type: SyncEntityType.AlbumToAssetV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await albumRepo.delete(album.id);
  await wait(2);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumToAssetsV1]);
});
|
||||
});
|
||||
306
server/test/medium/specs/sync/sync-album-user.spec.ts
Normal file
306
server/test/medium/specs/sync/sync-album-user.spec.ts
Normal file
|
|
@ -0,0 +1,306 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Acquire the shared test database once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncRequestType.AlbumUsersV1, () => {
|
||||
// Scenario: an album-user share row syncs with its identifying/role fields.
it('should sync an album user with the correct properties', async () => {
  const { auth, ctx } = await setup();
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  const { user } = await ctx.newUser();
  const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });

  await expect(ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1])).resolves.toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        albumId: albumUser.albumId,
        role: albumUser.role,
        userId: albumUser.userId,
      }),
      type: SyncEntityType.AlbumUserV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);
});
|
||||
|
||||
describe('owner', () => {
|
||||
// Scenario (owner's perspective): sharing own album with another user
// streams an AlbumUserV1 entry.
it('should detect and sync a new shared user', async () => {
  const { auth, ctx } = await setup();
  const { user: user1 } = await ctx.newUser();
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        albumId: albumUser.albumId,
        role: albumUser.role,
        userId: albumUser.userId,
      }),
      type: SyncEntityType.AlbumUserV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
|
||||
|
||||
// Scenario (owner's perspective): a role change on an existing share streams
// another AlbumUserV1 entry carrying the new role.
it('should detect and sync an updated shared user', async () => {
  const { auth, ctx } = await setup();
  const albumUserRepo = ctx.get(AlbumUserRepository);
  const { user: user1 } = await ctx.newUser();
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });

  // drain and ack the initial share event first
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);

  await albumUserRepo.update({ albumId: album.id, userId: user1.id }, { role: AlbumUserRole.Viewer });
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        albumId: albumUser.albumId,
        role: AlbumUserRole.Viewer,
        userId: albumUser.userId,
      }),
      type: SyncEntityType.AlbumUserV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
|
||||
|
||||
// Scenario (owner's perspective): removing a share streams AlbumUserDeleteV1
// identifying the album/user pair.
it('should detect and sync a deleted shared user', async () => {
  const { auth, ctx } = await setup();
  const albumUserRepo = ctx.get(AlbumUserRepository);
  const { user: user1 } = await ctx.newUser();
  const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
  const { albumUser } = await ctx.newAlbumUser({ albumId: album.id, userId: user1.id, role: AlbumUserRole.Editor });

  // drain and ack the initial share event first
  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);

  await albumUserRepo.delete({ albumId: album.id, userId: user1.id });
  const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
  expect(newResponse).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        albumId: albumUser.albumId,
        userId: albumUser.userId,
      }),
      type: SyncEntityType.AlbumUserDeleteV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, newResponse);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
|
||||
});
|
||||
|
||||
describe('shared user', () => {
|
||||
// Scenario (sharee's perspective): being added to someone else's album streams
// the user's own AlbumUserV1 share row.
it('should detect and sync a new shared user', async () => {
  const { auth, ctx } = await setup();
  const { user: user1 } = await ctx.newUser();
  const { album } = await ctx.newAlbum({ ownerId: user1.id });
  const { albumUser } = await ctx.newAlbumUser({
    albumId: album.id,
    userId: auth.user.id,
    role: AlbumUserRole.Editor,
  });

  const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
  expect(response).toEqual([
    {
      ack: expect.any(String),
      data: expect.objectContaining({
        albumId: albumUser.albumId,
        role: albumUser.role,
        userId: albumUser.userId,
      }),
      type: SyncEntityType.AlbumUserV1,
    },
    expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
  ]);

  await ctx.syncAckAll(auth, response);
  await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
});
|
||||
|
||||
it('should detect and sync an updated shared user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const albumUserRepo = ctx.get(AlbumUserRepository);
|
||||
const { user: owner } = await ctx.newUser();
|
||||
const { user: user } = await ctx.newUser();
|
||||
const { album } = await ctx.newAlbum({ ownerId: owner.id });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
expect(response).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
|
||||
await albumUserRepo.update({ albumId: album.id, userId: user.id }, { role: AlbumUserRole.Viewer });
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
albumId: album.id,
|
||||
role: AlbumUserRole.Viewer,
|
||||
userId: user.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumUserV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted shared user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const albumUserRepo = ctx.get(AlbumUserRepository);
|
||||
const { user: owner } = await ctx.newUser();
|
||||
const { user: user } = await ctx.newUser();
|
||||
const { album } = await ctx.newAlbum({ ownerId: owner.id });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
expect(response).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.AlbumUserV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
await albumUserRepo.delete({ albumId: album.id, userId: user.id });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
albumId: album.id,
|
||||
userId: user.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumUserDeleteV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
});
|
||||
|
||||
it('should backfill album users when a user shares an album with you', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user1 } = await ctx.newUser();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { album: album1 } = await ctx.newAlbum({ ownerId: user1.id });
|
||||
const { album: album2 } = await ctx.newAlbum({ ownerId: user1.id });
|
||||
// backfill album user
|
||||
await ctx.newAlbumUser({ albumId: album1.id, userId: user1.id, role: AlbumUserRole.Editor });
|
||||
await wait(2);
|
||||
// initial album user
|
||||
await ctx.newAlbumUser({ albumId: album2.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
await wait(2);
|
||||
// post checkpoint album user
|
||||
await ctx.newAlbumUser({ albumId: album1.id, userId: user2.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
albumId: album2.id,
|
||||
role: AlbumUserRole.Editor,
|
||||
userId: auth.user.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumUserV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
// ack initial user
|
||||
await ctx.syncAckAll(auth, response);
|
||||
// get access to the backfill album user
|
||||
await ctx.newAlbumUser({ albumId: album1.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
// should backfill the album user
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
albumId: album1.id,
|
||||
role: AlbumUserRole.Editor,
|
||||
userId: user1.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumUserBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.AlbumUserBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
albumId: album1.id,
|
||||
role: AlbumUserRole.Editor,
|
||||
userId: user2.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumUserV1,
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
albumId: album1.id,
|
||||
role: AlbumUserRole.Editor,
|
||||
userId: auth.user.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumUserV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumUsersV1]);
|
||||
});
|
||||
});
|
||||
});
|
||||
235
server/test/medium/specs/sync/sync-album.spec.ts
Normal file
235
server/test/medium/specs/sync/sync-album.spec.ts
Normal file
|
|
@ -0,0 +1,235 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AlbumUserRole, SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AlbumUserRepository } from 'src/repositories/album-user.repository';
|
||||
import { AlbumRepository } from 'src/repositories/album.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared database handle for every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

// Builds a sync test context plus an authenticated user/session pair.
// An explicit db may be passed when a test needs an isolated database.
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db || defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncRequestType.AlbumsV1, () => {
|
||||
it('should sync an album with the correct properties', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: album.id,
|
||||
name: album.albumName,
|
||||
ownerId: album.ownerId,
|
||||
}),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a new album', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: album.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync an album delete', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const albumRepo = ctx.get(AlbumRepository);
|
||||
const { album } = await ctx.newAlbum({ ownerId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: album.id,
|
||||
}),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await albumRepo.delete(album.id);
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
albumId: album.id,
|
||||
},
|
||||
type: SyncEntityType.AlbumDeleteV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
describe('shared albums', () => {
|
||||
it('should detect and sync an album create', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { album } = await ctx.newAlbum({ ownerId: user2.id });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({ id: album.id }),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync an album share (share before sync)', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { album } = await ctx.newAlbum({ ownerId: user2.id });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({ id: album.id }),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync an album share (share after sync)', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { album: userAlbum } = await ctx.newAlbum({ ownerId: auth.user.id });
|
||||
const { album: user2Album } = await ctx.newAlbum({ ownerId: user2.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({ id: userAlbum.id }),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.newAlbumUser({ userId: auth.user.id, albumId: user2Album.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({ id: user2Album.id }),
|
||||
type: SyncEntityType.AlbumV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync an album delete`', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const albumRepo = ctx.get(AlbumRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { album } = await ctx.newAlbum({ ownerId: user2.id });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
|
||||
await albumRepo.delete(album.id);
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: { albumId: album.id },
|
||||
type: SyncEntityType.AlbumDeleteV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync an album unshare as an album delete', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const albumUserRepo = ctx.get(AlbumUserRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { album } = await ctx.newAlbum({ ownerId: user2.id });
|
||||
await ctx.newAlbumUser({ albumId: album.id, userId: auth.user.id, role: AlbumUserRole.Editor });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(response).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AlbumV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
|
||||
await albumUserRepo.delete({ albumId: album.id, userId: auth.user.id });
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AlbumsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: { albumId: album.id },
|
||||
type: SyncEntityType.AlbumDeleteV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AlbumsV1]);
|
||||
});
|
||||
});
|
||||
});
|
||||
81
server/test/medium/specs/sync/sync-asset-exif.spec.ts
Normal file
81
server/test/medium/specs/sync/sync-asset-exif.spec.ts
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared database handle for every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

// Builds a sync test context plus an authenticated user/session pair.
// An explicit db may be passed when a test needs an isolated database.
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db || defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
// AssetExifsV1 sync stream: exif rows are synced with the full column set
// (unset columns come back as null), and only for the user's own assets.
describe(SyncRequestType.AssetExifsV1, () => {
  it('should detect and sync the first asset exif', async () => {
    const { auth, ctx } = await setup();
    const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });

    // The exif payload is an exact object (not objectContaining): every column
    // is present, with nulls for everything the test did not set.
    const response = await ctx.syncStream(auth, [SyncRequestType.AssetExifsV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          assetId: asset.id,
          city: null,
          country: null,
          dateTimeOriginal: null,
          description: '',
          exifImageHeight: null,
          exifImageWidth: null,
          exposureTime: null,
          fNumber: null,
          fileSizeInByte: null,
          focalLength: null,
          fps: null,
          iso: null,
          latitude: null,
          lensModel: null,
          longitude: null,
          make: 'Canon',
          model: null,
          modifyDate: null,
          orientation: null,
          profileDescription: null,
          projectionType: null,
          rating: null,
          state: null,
          timeZone: null,
        },
        type: SyncEntityType.AssetExifV1,
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
  });

  it('should only sync asset exif for own user', async () => {
    const { auth, ctx } = await setup();
    const { user: user2 } = await ctx.newUser();
    // user2 shares with auth via a partner relationship, but exif for user2's
    // asset must only reach user2's own stream.
    await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
    const { asset } = await ctx.newAsset({ ownerId: user2.id });
    await ctx.newExif({ assetId: asset.id, make: 'Canon' });
    const { session } = await ctx.newSession({ userId: user2.id });
    const auth2 = factory.auth({ session, user: user2 });

    await expect(ctx.syncStream(auth2, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
      expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
    // The partner (auth) sees nothing pending — the exif did not leak across.
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetExifsV1]);
  });
});
|
||||
99
server/test/medium/specs/sync/sync-asset-face.spec.ts
Normal file
99
server/test/medium/specs/sync/sync-asset-face.spec.ts
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared database handle for every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

// Builds a sync test context plus an authenticated user/session pair.
// An explicit db may be passed when a test needs an isolated database.
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db || defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncEntityType.AssetFaceV1, () => {
|
||||
it('should detect and sync the first asset face', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
|
||||
const { person } = await ctx.newPerson({ ownerId: auth.user.id });
|
||||
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id, personId: person.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: assetFace.id,
|
||||
assetId: asset.id,
|
||||
personId: person.id,
|
||||
imageWidth: assetFace.imageWidth,
|
||||
imageHeight: assetFace.imageHeight,
|
||||
boundingBoxX1: assetFace.boundingBoxX1,
|
||||
boundingBoxY1: assetFace.boundingBoxY1,
|
||||
boundingBoxX2: assetFace.boundingBoxX2,
|
||||
boundingBoxY2: assetFace.boundingBoxY2,
|
||||
sourceType: assetFace.sourceType,
|
||||
}),
|
||||
type: 'AssetFaceV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted asset face', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const personRepo = ctx.get(PersonRepository);
|
||||
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
|
||||
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id });
|
||||
await personRepo.deleteAssetFace(assetFace.id);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetFacesV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetFaceId: assetFace.id,
|
||||
},
|
||||
type: 'AssetFaceDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
|
||||
});
|
||||
|
||||
it('should not sync an asset face or asset face delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const personRepo = ctx.get(PersonRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { session } = await ctx.newSession({ userId: user2.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { assetFace } = await ctx.newAssetFace({ assetId: asset.id });
|
||||
const auth2 = factory.auth({ session, user: user2 });
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetFaceV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
|
||||
|
||||
await personRepo.deleteAssetFace(assetFace.id);
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetFacesV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetFaceDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetFacesV1]);
|
||||
});
|
||||
});
|
||||
128
server/test/medium/specs/sync/sync-asset-metadata.spec.ts
Normal file
128
server/test/medium/specs/sync/sync-asset-metadata.spec.ts
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { AssetMetadataKey, SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared database handle for every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

// Builds a sync test context plus an authenticated user/session pair.
// An explicit db may be passed when a test needs an isolated database.
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db || defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncEntityType.AssetMetadataV1, () => {
|
||||
it('should detect and sync new asset metadata', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
key: AssetMetadataKey.MobileApp,
|
||||
assetId: asset.id,
|
||||
value: { iCloudId: 'abc123' },
|
||||
},
|
||||
type: 'AssetMetadataV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
|
||||
});
|
||||
|
||||
it('should update asset metadata', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
key: AssetMetadataKey.MobileApp,
|
||||
assetId: asset.id,
|
||||
value: { iCloudId: 'abc123' },
|
||||
},
|
||||
type: 'AssetMetadataV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc456' } }]);
|
||||
|
||||
const updatedResponse = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
|
||||
expect(updatedResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
key: AssetMetadataKey.MobileApp,
|
||||
assetId: asset.id,
|
||||
value: { iCloudId: 'abc456' },
|
||||
},
|
||||
type: 'AssetMetadataV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, updatedResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetMetadataV1]);
|
||||
});
|
||||
});
|
||||
|
||||
describe(SyncEntityType.AssetMetadataDeleteV1, () => {
|
||||
it('should delete and sync asset metadata', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
await assetRepo.upsertMetadata(asset.id, [{ key: AssetMetadataKey.MobileApp, value: { iCloudId: 'abc123' } }]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
key: AssetMetadataKey.MobileApp,
|
||||
assetId: asset.id,
|
||||
value: { iCloudId: 'abc123' },
|
||||
},
|
||||
type: 'AssetMetadataV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await assetRepo.deleteMetadataByKey(asset.id, AssetMetadataKey.MobileApp);
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetMetadataV1])).resolves.toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
key: AssetMetadataKey.MobileApp,
|
||||
},
|
||||
type: 'AssetMetadataDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
121
server/test/medium/specs/sync/sync-asset.spec.ts
Normal file
121
server/test/medium/specs/sync/sync-asset.spec.ts
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Shared database handle for every test in this file; initialized once in beforeAll.
let defaultDatabase: Kysely<DB>;

// Builds a sync test context plus an authenticated user/session pair.
// An explicit db may be passed when a test needs an isolated database.
const setup = async (db?: Kysely<DB>) => {
  const ctx = new SyncTestContext(db || defaultDatabase);
  const { auth, user, session } = await ctx.newSyncAuthUser();
  return { auth, user, session, ctx };
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncEntityType.AssetV1, () => {
|
||||
it('should detect and sync the first asset', async () => {
|
||||
const originalFileName = 'firstAsset';
|
||||
const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const date = new Date().toISOString();
|
||||
|
||||
const { auth, ctx } = await setup();
|
||||
const { asset } = await ctx.newAsset({
|
||||
originalFileName,
|
||||
ownerId: auth.user.id,
|
||||
checksum: Buffer.from(checksum, 'base64'),
|
||||
thumbhash: Buffer.from(thumbhash, 'base64'),
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
localDateTime: date,
|
||||
deletedAt: null,
|
||||
duration: '0:10:00.00000',
|
||||
libraryId: null,
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
});
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: asset.id,
|
||||
originalFileName,
|
||||
ownerId: asset.ownerId,
|
||||
thumbhash,
|
||||
checksum,
|
||||
deletedAt: asset.deletedAt,
|
||||
fileCreatedAt: asset.fileCreatedAt,
|
||||
fileModifiedAt: asset.fileModifiedAt,
|
||||
isFavorite: asset.isFavorite,
|
||||
localDateTime: asset.localDateTime,
|
||||
type: asset.type,
|
||||
visibility: asset.visibility,
|
||||
duration: asset.duration,
|
||||
stackId: null,
|
||||
livePhotoVideoId: null,
|
||||
libraryId: asset.libraryId,
|
||||
width: asset.width,
|
||||
height: asset.height,
|
||||
isEdited: asset.isEdited,
|
||||
},
|
||||
type: 'AssetV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted asset', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
|
||||
await assetRepo.remove(asset);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
},
|
||||
type: 'AssetDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
});
|
||||
|
||||
it('should not sync an asset or asset delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { session } = await ctx.newSession({ userId: user2.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const auth2 = factory.auth({ session, user: user2 });
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
|
||||
await assetRepo.remove(asset);
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
});
|
||||
});
|
||||
106
server/test/medium/specs/sync/sync-auth-user.spec.ts
Normal file
106
server/test/medium/specs/sync/sync-auth-user.spec.ts
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.AuthUserV1, () => {
|
||||
it('should detect and sync the first user', async () => {
|
||||
const { auth, user, ctx } = await setup(await getKyselyDB());
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: user.id,
|
||||
isAdmin: user.isAdmin,
|
||||
deletedAt: user.deletedAt,
|
||||
name: user.name,
|
||||
avatarColor: user.avatarColor,
|
||||
email: user.email,
|
||||
pinCode: user.pinCode,
|
||||
hasProfileImage: false,
|
||||
profileChangedAt: (user.profileChangedAt as Date).toISOString(),
|
||||
oauthId: user.oauthId,
|
||||
quotaSizeInBytes: user.quotaSizeInBytes,
|
||||
quotaUsageInBytes: user.quotaUsageInBytes,
|
||||
storageLabel: user.storageLabel,
|
||||
},
|
||||
type: 'AuthUserV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AuthUsersV1]);
|
||||
});
|
||||
|
||||
it('should sync a change and then another change to that same user', async () => {
|
||||
const { auth, user, ctx } = await setup(await getKyselyDB());
|
||||
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: user.id,
|
||||
isAdmin: false,
|
||||
}),
|
||||
type: 'AuthUserV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await userRepo.update(user.id, { isAdmin: true });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: user.id,
|
||||
isAdmin: true,
|
||||
}),
|
||||
type: 'AuthUserV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should only sync the auth user', async () => {
|
||||
const { auth, user, ctx } = await setup(await getKyselyDB());
|
||||
|
||||
await ctx.newUser();
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AuthUsersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: user.id,
|
||||
isAdmin: false,
|
||||
}),
|
||||
type: 'AuthUserV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
60
server/test/medium/specs/sync/sync-complete.spec.ts
Normal file
60
server/test/medium/specs/sync/sync-complete.spec.ts
Normal file
|
|
@ -0,0 +1,60 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { DateTime } from 'luxon';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { SyncCheckpointRepository } from 'src/repositories/sync-checkpoint.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { toAck } from 'src/utils/sync';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
import { v7 } from 'uuid';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.SyncCompleteV1, () => {
|
||||
it('should work', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
});
|
||||
|
||||
it('should detect an old checkpoint and send back a reset', async () => {
|
||||
const { auth, session, ctx } = await setup();
|
||||
const updateId = v7({ msecs: DateTime.now().minus({ days: 60 }).toMillis() });
|
||||
|
||||
await ctx.get(SyncCheckpointRepository).upsertAll([
|
||||
{
|
||||
type: SyncEntityType.SyncCompleteV1,
|
||||
sessionId: session.id,
|
||||
ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId }),
|
||||
},
|
||||
]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
expect(response).toEqual([{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' }]);
|
||||
});
|
||||
|
||||
it('should not send back a reset if the checkpoint is recent', async () => {
|
||||
const { auth, session, ctx } = await setup();
|
||||
const updateId = v7({ msecs: DateTime.now().minus({ days: 7 }).toMillis() });
|
||||
|
||||
await ctx.get(SyncCheckpointRepository).upsertAll([
|
||||
{
|
||||
type: SyncEntityType.SyncCompleteV1,
|
||||
sessionId: session.id,
|
||||
ack: toAck({ type: SyncEntityType.SyncCompleteV1, updateId }),
|
||||
},
|
||||
]);
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
});
|
||||
});
|
||||
91
server/test/medium/specs/sync/sync-memory-asset.spec.ts
Normal file
91
server/test/medium/specs/sync/sync-memory-asset.spec.ts
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { MemoryRepository } from 'src/repositories/memory.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.MemoryToAssetV1, () => {
|
||||
it('should detect and sync a memory to asset relation', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { memory } = await ctx.newMemory({ ownerId: user.id });
|
||||
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
memoryId: memory.id,
|
||||
assetId: asset.id,
|
||||
},
|
||||
type: 'MemoryToAssetV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted memory to asset relation', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const memoryRepo = ctx.get(MemoryRepository);
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { memory } = await ctx.newMemory({ ownerId: user.id });
|
||||
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
|
||||
await memoryRepo.removeAssetIds(memory.id, [asset.id]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.MemoryToAssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
memoryId: memory.id,
|
||||
},
|
||||
type: 'MemoryToAssetDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
|
||||
});
|
||||
|
||||
it('should not sync a memory to asset relation or delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const memoryRepo = ctx.get(MemoryRepository);
|
||||
const { auth: auth2, user: user2 } = await ctx.newSyncAuthUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { memory } = await ctx.newMemory({ ownerId: user2.id });
|
||||
await ctx.newMemoryAsset({ memoryId: memory.id, assetId: asset.id });
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.MemoryToAssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
|
||||
|
||||
await memoryRepo.removeAssetIds(memory.id, [asset.id]);
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.MemoryToAssetsV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.MemoryToAssetDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoryToAssetsV1]);
|
||||
});
|
||||
});
|
||||
115
server/test/medium/specs/sync/sync-memory.spec.ts
Normal file
115
server/test/medium/specs/sync/sync-memory.spec.ts
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { MemoryRepository } from 'src/repositories/memory.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.MemoryV1, () => {
|
||||
it('should detect and sync the first memory with the right properties', async () => {
|
||||
const { auth, user: user1, ctx } = await setup();
|
||||
const { memory } = await ctx.newMemory({ ownerId: user1.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: memory.id,
|
||||
createdAt: expect.any(String),
|
||||
updatedAt: expect.any(String),
|
||||
deletedAt: memory.deletedAt,
|
||||
type: memory.type,
|
||||
data: memory.data,
|
||||
hideAt: memory.hideAt,
|
||||
showAt: memory.showAt,
|
||||
seenAt: memory.seenAt,
|
||||
memoryAt: expect.any(String),
|
||||
isSaved: memory.isSaved,
|
||||
ownerId: memory.ownerId,
|
||||
},
|
||||
type: 'MemoryV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted memory', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const memoryRepo = ctx.get(MemoryRepository);
|
||||
const { memory } = await ctx.newMemory({ ownerId: user.id });
|
||||
await memoryRepo.delete(memory.id);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
memoryId: memory.id,
|
||||
},
|
||||
type: 'MemoryDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
|
||||
});
|
||||
|
||||
it('should sync a memory and then an update to that same memory', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const memoryRepo = ctx.get(MemoryRepository);
|
||||
const { memory } = await ctx.newMemory({ ownerId: user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({ id: memory.id }),
|
||||
type: 'MemoryV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await memoryRepo.update(memory.id, { seenAt: new Date() });
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.MemoriesV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({ id: memory.id }),
|
||||
type: 'MemoryV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
|
||||
});
|
||||
|
||||
it('should not sync a memory or a memory delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const memoryRepo = ctx.get(MemoryRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { memory } = await ctx.newMemory({ ownerId: user2.id });
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
|
||||
await memoryRepo.delete(memory.id);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.MemoriesV1]);
|
||||
});
|
||||
});
|
||||
253
server/test/medium/specs/sync/sync-partner-asset-exif.spec.ts
Normal file
253
server/test/medium/specs/sync/sync-partner-asset-exif.spec.ts
Normal file
|
|
@ -0,0 +1,253 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncRequestType.PartnerAssetExifsV1, () => {
|
||||
it('should detect and sync the first partner asset exif', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
city: null,
|
||||
country: null,
|
||||
dateTimeOriginal: null,
|
||||
description: '',
|
||||
exifImageHeight: null,
|
||||
exifImageWidth: null,
|
||||
exposureTime: null,
|
||||
fNumber: null,
|
||||
fileSizeInByte: null,
|
||||
focalLength: null,
|
||||
fps: null,
|
||||
iso: null,
|
||||
latitude: null,
|
||||
lensModel: null,
|
||||
longitude: null,
|
||||
make: 'Canon',
|
||||
model: null,
|
||||
modifyDate: null,
|
||||
orientation: null,
|
||||
profileDescription: null,
|
||||
projectionType: null,
|
||||
rating: null,
|
||||
state: null,
|
||||
timeZone: null,
|
||||
},
|
||||
type: SyncEntityType.PartnerAssetExifV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
});
|
||||
|
||||
it('should not sync partner asset exif for own user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
});
|
||||
|
||||
it('should not sync partner asset exif for unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await ctx.newExif({ assetId: asset.id, make: 'Canon' });
|
||||
const { session } = await ctx.newSession({ userId: user3.id });
|
||||
const authUser3 = factory.auth({ session, user: user3 });
|
||||
|
||||
await expect(ctx.syncStream(authUser3, [SyncRequestType.AssetExifsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetExifV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
});
|
||||
|
||||
it('should backfill partner asset exif when a partner shared their library with you', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await ctx.newExif({ assetId: assetUser3.id, make: 'Canon' });
|
||||
await wait(2);
|
||||
const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newExif({ assetId: assetUser2.id, make: 'Canon' });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(response).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
assetId: assetUser2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]),
|
||||
);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
assetId: assetUser3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
});
|
||||
|
||||
it('should handle partners with users ids lower than a uuidv7', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser({ id: '00d4c0af-7695-4cf2-85e6-415eeaf449cb' });
|
||||
const { user: user3 } = await ctx.newUser({ id: '00e4c0af-7695-4cf2-85e6-415eeaf449cb' });
|
||||
const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await ctx.newExif({ assetId: assetUser3.id, make: 'assetUser3' });
|
||||
await wait(2);
|
||||
const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newExif({ assetId: assetUser2.id, make: 'assetUser2' });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
assetId: assetUser2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
// This checks that our ack upsert is correct
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.stringMatching(new RegExp(String.raw`${SyncEntityType.PartnerAssetExifBackfillV1}\|.+?\|.+`)),
|
||||
data: expect.objectContaining({
|
||||
assetId: assetUser3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerAssetExifBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
});
|
||||
|
||||
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await ctx.newExif({ assetId: assetUser3.id, make: 'assetUser3' });
|
||||
await wait(2);
|
||||
const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newExif({ assetId: assetUser2.id, make: 'assetUser2' });
|
||||
await wait(2);
|
||||
const { asset: asset2User3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await ctx.newExif({ assetId: asset2User3.id, make: 'asset2User3' });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
assetId: assetUser2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.stringMatching(new RegExp(String.raw`${SyncEntityType.PartnerAssetExifBackfillV1}\|.+?\|.+`)),
|
||||
data: expect.objectContaining({
|
||||
assetId: assetUser3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerAssetExifBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
assetId: asset2User3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetExifV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetExifsV1]);
|
||||
});
|
||||
});
|
||||
279
server/test/medium/specs/sync/sync-partner-asset.spec.ts
Normal file
279
server/test/medium/specs/sync/sync-partner-asset.spec.ts
Normal file
|
|
@ -0,0 +1,279 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { AssetRepository } from 'src/repositories/asset.repository';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncRequestType.PartnerAssetsV1, () => {
|
||||
it('should detect and sync the first partner asset', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
const originalFileName = 'firstPartnerAsset';
|
||||
const checksum = '1115vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const thumbhash = '2225vHcVkZzNp3Q9G+FEA0nu6zUbGb4Tj4UOXkN0wRA=';
|
||||
const date = new Date().toISOString();
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({
|
||||
ownerId: user2.id,
|
||||
originalFileName,
|
||||
checksum: Buffer.from(checksum, 'base64'),
|
||||
thumbhash: Buffer.from(thumbhash, 'base64'),
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
localDateTime: date,
|
||||
deletedAt: null,
|
||||
duration: '0:10:00.00000',
|
||||
libraryId: null,
|
||||
});
|
||||
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: asset.id,
|
||||
ownerId: asset.ownerId,
|
||||
originalFileName,
|
||||
thumbhash,
|
||||
checksum,
|
||||
deletedAt: null,
|
||||
fileCreatedAt: date,
|
||||
fileModifiedAt: date,
|
||||
isFavorite: false,
|
||||
localDateTime: date,
|
||||
type: asset.type,
|
||||
visibility: asset.visibility,
|
||||
duration: asset.duration,
|
||||
isEdited: asset.isEdited,
|
||||
stackId: null,
|
||||
livePhotoVideoId: null,
|
||||
libraryId: asset.libraryId,
|
||||
width: null,
|
||||
height: null,
|
||||
},
|
||||
type: SyncEntityType.PartnerAssetV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted partner asset', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await assetRepo.remove(asset);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
assetId: asset.id,
|
||||
},
|
||||
type: SyncEntityType.PartnerAssetDeleteV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should not sync a deleted partner asset due to a user delete', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await ctx.newAsset({ ownerId: user2.id });
|
||||
await userRepo.delete({ id: user2.id }, true);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should not sync a deleted partner asset due to a partner delete (unshare)', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const partnerRepo = ctx.get(PartnerRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newAsset({ ownerId: user2.id });
|
||||
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.PartnerAssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await partnerRepo.remove(partner);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should not sync an asset or asset delete for own user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: auth.user.id });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
await assetRepo.remove(asset);
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should not sync an asset or asset delete for unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const assetRepo = ctx.get(AssetRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { session } = await ctx.newSession({ userId: user2.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const auth2 = factory.auth({ session, user: user2 });
|
||||
|
||||
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
|
||||
await assetRepo.remove(asset);
|
||||
|
||||
await expect(ctx.syncStream(auth2, [SyncRequestType.AssetsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should backfill partner assets when a partner shared their library with you', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await wait(2);
|
||||
const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: assetUser2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: assetUser3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerAssetBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
|
||||
it('should only backfill partner assets created prior to the current partner asset checkpoint', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { asset: assetUser3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await wait(2);
|
||||
const { asset: assetUser2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await wait(2);
|
||||
const { asset: asset2User3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: assetUser2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: assetUser3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerAssetBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: asset2User3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerAssetV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerAssetsV1]);
|
||||
});
|
||||
});
|
||||
247
server/test/medium/specs/sync/sync-partner-stack.spec.ts
Normal file
247
server/test/medium/specs/sync/sync-partner-stack.spec.ts
Normal file
|
|
@ -0,0 +1,247 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { StackRepository } from 'src/repositories/stack.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB, wait } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncRequestType.PartnerStacksV1, () => {
|
||||
it('should detect and sync the first partner stack', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
id: stack.id,
|
||||
ownerId: stack.ownerId,
|
||||
createdAt: (stack.createdAt as Date).toISOString(),
|
||||
updatedAt: (stack.updatedAt as Date).toISOString(),
|
||||
primaryAssetId: stack.primaryAssetId,
|
||||
},
|
||||
type: SyncEntityType.PartnerStackV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted partner stack', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
|
||||
await stackRepo.delete(stack.id);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining('PartnerStackDeleteV1'),
|
||||
data: {
|
||||
stackId: stack.id,
|
||||
},
|
||||
type: SyncEntityType.PartnerStackDeleteV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should not sync a deleted partner stack due to a user delete', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
|
||||
await userRepo.delete({ id: user2.id }, true);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should not sync a deleted partner stack due to a partner delete (unshare)', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const partnerRepo = ctx.get(PartnerRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
await ctx.newStack({ ownerId: user2.id }, [asset.id]);
|
||||
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.PartnerStackV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await partnerRepo.remove(partner);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should not sync a stack or stack delete for own user', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { asset } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset.id]);
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: user.id });
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.StackV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
await stackRepo.delete(stack.id);
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.StacksV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should not sync a stack or stack delete for unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { session } = await ctx.newSession({ userId: user2.id });
|
||||
const { asset } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset.id]);
|
||||
const auth2 = factory.auth({ session, user: user2 });
|
||||
|
||||
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.StackV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
|
||||
await stackRepo.delete(stack.id);
|
||||
|
||||
await expect(ctx.syncStream(auth2, [SyncRequestType.StacksV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.StackDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should backfill partner stacks when a partner shared their library with you', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { asset: asset3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
const { stack: stack3 } = await ctx.newStack({ ownerId: user3.id }, [asset3.id]);
|
||||
await wait(2);
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { stack: stack2 } = await ctx.newStack({ ownerId: user2.id }, [asset2.id]);
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining('PartnerStackV1'),
|
||||
data: expect.objectContaining({
|
||||
id: stack2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerStackV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: user.id });
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
|
||||
data: expect.objectContaining({
|
||||
id: stack3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerStackBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
|
||||
it('should only backfill partner stacks created prior to the current partner stack checkpoint', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { asset: asset3 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
const { stack: stack3 } = await ctx.newStack({ ownerId: user3.id }, [asset3.id]);
|
||||
await wait(2);
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { stack: stack2 } = await ctx.newStack({ ownerId: user2.id }, [asset2.id]);
|
||||
await wait(2);
|
||||
const { asset: asset4 } = await ctx.newAsset({ ownerId: user3.id });
|
||||
const { stack: stack4 } = await ctx.newStack({ ownerId: user3.id }, [asset4.id]);
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerStackV1),
|
||||
data: expect.objectContaining({
|
||||
id: stack2.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerStackV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await ctx.newPartner({ sharedById: user3.id, sharedWithId: auth.user.id });
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: stack3.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerStackBackfillV1,
|
||||
},
|
||||
{
|
||||
ack: expect.stringContaining(SyncEntityType.PartnerStackBackfillV1),
|
||||
data: {},
|
||||
type: SyncEntityType.SyncAckV1,
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: stack4.id,
|
||||
}),
|
||||
type: SyncEntityType.PartnerStackV1,
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnerStacksV1]);
|
||||
});
|
||||
});
|
||||
178
server/test/medium/specs/sync/sync-partner.spec.ts
Normal file
178
server/test/medium/specs/sync/sync-partner.spec.ts
Normal file
|
|
@ -0,0 +1,178 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { PartnerRepository } from 'src/repositories/partner.repository';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.PartnerV1, () => {
|
||||
it('should detect and sync the first partner', async () => {
|
||||
const { auth, user: user1, ctx } = await setup();
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
inTimeline: partner.inTimeline,
|
||||
sharedById: partner.sharedById,
|
||||
sharedWithId: partner.sharedWithId,
|
||||
},
|
||||
type: 'PartnerV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted partner', async () => {
|
||||
const { auth, user: user1, ctx } = await setup();
|
||||
|
||||
const partnerRepo = ctx.get(PartnerRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
|
||||
await partnerRepo.remove(partner);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
sharedById: partner.sharedById,
|
||||
sharedWithId: partner.sharedWithId,
|
||||
},
|
||||
type: 'PartnerDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a partner share both to and from another user', async () => {
|
||||
const { auth, user: user1, ctx } = await setup();
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { partner: partner1 } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
|
||||
const { partner: partner2 } = await ctx.newPartner({ sharedById: user1.id, sharedWithId: user2.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
inTimeline: partner1.inTimeline,
|
||||
sharedById: partner1.sharedById,
|
||||
sharedWithId: partner1.sharedWithId,
|
||||
},
|
||||
type: 'PartnerV1',
|
||||
},
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
inTimeline: partner2.inTimeline,
|
||||
sharedById: partner2.sharedById,
|
||||
sharedWithId: partner2.sharedWithId,
|
||||
},
|
||||
type: 'PartnerV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
});
|
||||
|
||||
it('should sync a partner and then an update to that same partner', async () => {
|
||||
const { auth, user: user1, ctx } = await setup();
|
||||
|
||||
const partnerRepo = ctx.get(PartnerRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user1.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
inTimeline: partner.inTimeline,
|
||||
sharedById: partner.sharedById,
|
||||
sharedWithId: partner.sharedWithId,
|
||||
},
|
||||
type: 'PartnerV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
const updated = await partnerRepo.update(
|
||||
{ sharedById: partner.sharedById, sharedWithId: partner.sharedWithId },
|
||||
{ inTimeline: true },
|
||||
);
|
||||
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.PartnersV1]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
inTimeline: updated.inTimeline,
|
||||
sharedById: updated.sharedById,
|
||||
sharedWithId: updated.sharedWithId,
|
||||
},
|
||||
type: 'PartnerV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
});
|
||||
|
||||
it('should not sync a partner or partner delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
const partnerRepo = ctx.get(PartnerRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { user: user3 } = await ctx.newUser();
|
||||
const { partner } = await ctx.newPartner({ sharedById: user2.id, sharedWithId: user3.id });
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
await partnerRepo.remove(partner);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
});
|
||||
|
||||
it('should not sync a partner delete after a user is deleted', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
await ctx.newPartner({ sharedById: user2.id, sharedWithId: auth.user.id });
|
||||
await userRepo.delete({ id: user2.id }, true);
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PartnersV1]);
|
||||
});
|
||||
});
|
||||
93
server/test/medium/specs/sync/sync-person.spec.ts
Normal file
93
server/test/medium/specs/sync/sync-person.spec.ts
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { PersonRepository } from 'src/repositories/person.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { factory } from 'test/small.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.PersonV1, () => {
|
||||
it('should detect and sync the first person', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const { person } = await ctx.newPerson({ ownerId: auth.user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: expect.objectContaining({
|
||||
id: person.id,
|
||||
name: person.name,
|
||||
isHidden: person.isHidden,
|
||||
birthDate: person.birthDate,
|
||||
faceAssetId: person.faceAssetId,
|
||||
isFavorite: person.isFavorite,
|
||||
ownerId: auth.user.id,
|
||||
color: person.color,
|
||||
}),
|
||||
type: 'PersonV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted person', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const personRepo = ctx.get(PersonRepository);
|
||||
const { person } = await ctx.newPerson({ ownerId: auth.user.id });
|
||||
await personRepo.delete([person.id]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.PeopleV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
personId: person.id,
|
||||
},
|
||||
type: 'PersonDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
|
||||
});
|
||||
|
||||
it('should not sync a person or person delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const personRepo = ctx.get(PersonRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { session } = await ctx.newSession({ userId: user2.id });
|
||||
const { person } = await ctx.newPerson({ ownerId: user2.id });
|
||||
const auth2 = factory.auth({ session, user: user2 });
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.PersonV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
|
||||
|
||||
await personRepo.delete([person.id]);
|
||||
|
||||
expect(await ctx.syncStream(auth2, [SyncRequestType.PeopleV1])).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.PersonDeleteV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.PeopleV1]);
|
||||
});
|
||||
});
|
||||
94
server/test/medium/specs/sync/sync-reset.spec.ts
Normal file
94
server/test/medium/specs/sync/sync-reset.spec.ts
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { SessionRepository } from 'src/repositories/session.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
defaultDatabase = await getKyselyDB();
|
||||
});
|
||||
|
||||
describe(SyncEntityType.SyncResetV1, () => {
|
||||
it('should work', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.AssetsV1]);
|
||||
});
|
||||
|
||||
it('should detect a pending sync reset', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
|
||||
await ctx.get(SessionRepository).update(auth.session!.id, {
|
||||
isPendingSyncReset: true,
|
||||
});
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
expect(response).toEqual([{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' }]);
|
||||
});
|
||||
|
||||
it('should not send other dtos when a reset is pending', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.get(SessionRepository).update(auth.session!.id, {
|
||||
isPendingSyncReset: true,
|
||||
});
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1])).resolves.toEqual([
|
||||
{ type: SyncEntityType.SyncResetV1, data: {}, ack: 'SyncResetV1|reset' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('should allow resetting a pending reset when requesting changes ', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
await ctx.get(SessionRepository).update(auth.session!.id, {
|
||||
isPendingSyncReset: true,
|
||||
});
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.AssetsV1], true)).resolves.toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
});
|
||||
|
||||
it('should reset the sync progress', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
await ctx.newAsset({ ownerId: user.id });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await ctx.get(SessionRepository).update(auth.session!.id, {
|
||||
isPendingSyncReset: true,
|
||||
});
|
||||
|
||||
const resetResponse = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
|
||||
await ctx.syncAckAll(auth, resetResponse);
|
||||
|
||||
const postResetResponse = await ctx.syncStream(auth, [SyncRequestType.AssetsV1]);
|
||||
expect(postResetResponse).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.AssetV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
114
server/test/medium/specs/sync/sync-stack.spec.ts
Normal file
114
server/test/medium/specs/sync/sync-stack.spec.ts
Normal file
|
|
@ -0,0 +1,114 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { StackRepository } from 'src/repositories/stack.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Database handle shared across tests in this file; assigned once in beforeAll.
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Provision the shared Kysely database connection once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncEntityType.StackV1, () => {
|
||||
it('should detect and sync the first stack', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining('StackV1'),
|
||||
data: {
|
||||
id: stack.id,
|
||||
createdAt: (stack.createdAt as Date).toISOString(),
|
||||
updatedAt: (stack.updatedAt as Date).toISOString(),
|
||||
primaryAssetId: stack.primaryAssetId,
|
||||
ownerId: stack.ownerId,
|
||||
},
|
||||
type: 'StackV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
|
||||
});
|
||||
|
||||
it('should detect and sync a deleted stack', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
|
||||
await stackRepo.delete(stack.id);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining('StackDeleteV1'),
|
||||
data: { stackId: stack.id },
|
||||
type: 'StackDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
|
||||
});
|
||||
|
||||
it('should sync a stack and then an update to that same stack', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user.id }, [asset1.id, asset2.id]);
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
|
||||
expect(response).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.StackV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await stackRepo.update(stack.id, { primaryAssetId: asset2.id });
|
||||
const newResponse = await ctx.syncStream(auth, [SyncRequestType.StacksV1]);
|
||||
expect(newResponse).toEqual([
|
||||
expect.objectContaining({ type: SyncEntityType.StackV1 }),
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
expect(newResponse).toEqual([
|
||||
{
|
||||
ack: expect.stringContaining('StackV1'),
|
||||
data: expect.objectContaining({ id: stack.id, primaryAssetId: asset2.id }),
|
||||
type: 'StackV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, newResponse);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
|
||||
});
|
||||
|
||||
it('should not sync a stack or stack delete for an unrelated user', async () => {
|
||||
const { auth, ctx } = await setup();
|
||||
const stackRepo = ctx.get(StackRepository);
|
||||
const { user: user2 } = await ctx.newUser();
|
||||
const { asset: asset1 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { asset: asset2 } = await ctx.newAsset({ ownerId: user2.id });
|
||||
const { stack } = await ctx.newStack({ ownerId: user2.id }, [asset1.id, asset2.id]);
|
||||
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
|
||||
await stackRepo.delete(stack.id);
|
||||
await ctx.assertSyncIsComplete(auth, [SyncRequestType.StacksV1]);
|
||||
});
|
||||
});
|
||||
19
server/test/medium/specs/sync/sync-types.spec.ts
Normal file
19
server/test/medium/specs/sync/sync-types.spec.ts
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
import { SyncRequestType } from 'src/enum';
|
||||
import { SYNC_TYPES_ORDER } from 'src/services/sync.service';
|
||||
|
||||
describe('types', () => {
|
||||
it('should have all the types in the ordering variable', () => {
|
||||
for (const key in SyncRequestType) {
|
||||
expect(SYNC_TYPES_ORDER).includes(key);
|
||||
}
|
||||
|
||||
expect(SYNC_TYPES_ORDER.length).toBe(Object.keys(SyncRequestType).length);
|
||||
});
|
||||
|
||||
it('should ensure album follows albums assets', () => {
|
||||
const albumIndex = SYNC_TYPES_ORDER.indexOf(SyncRequestType.AlbumsV1);
|
||||
const albumAssetsIndex = SYNC_TYPES_ORDER.indexOf(SyncRequestType.AlbumAssetsV1);
|
||||
|
||||
expect(albumIndex).toBeGreaterThan(albumAssetsIndex);
|
||||
});
|
||||
});
|
||||
125
server/test/medium/specs/sync/sync-user-metadata.spec.ts
Normal file
125
server/test/medium/specs/sync/sync-user-metadata.spec.ts
Normal file
|
|
@ -0,0 +1,125 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType, UserMetadataKey } from 'src/enum';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Database handle shared across tests in this file; assigned once in beforeAll.
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Provision the shared Kysely database connection once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncEntityType.UserMetadataV1, () => {
  it('should detect and sync new user metadata', async () => {
    const { auth, user, ctx } = await setup();

    const userRepo = ctx.get(UserRepository);
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });

    // A new metadata row is streamed as a full UserMetadataV1 payload.
    const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: true },
        },
        type: 'UserMetadataV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
  });

  it('should update user metadata', async () => {
    const { auth, user, ctx } = await setup();

    const userRepo = ctx.get(UserRepository);
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });

    // First sync: the initial value is delivered.
    const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: true },
        },
        type: 'UserMetadataV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, response);

    // Upsert with a changed value — the same key/user pair is updated.
    await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: false } });

    // Second sync: only the updated value is re-delivered after the earlier ack.
    const updatedResponse = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
    expect(updatedResponse).toEqual([
      {
        ack: expect.any(String),
        data: {
          key: UserMetadataKey.Onboarding,
          userId: user.id,
          value: { isOnboarded: false },
        },
        type: 'UserMetadataV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, updatedResponse);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UserMetadataV1]);
  });
});
|
||||
|
||||
describe(SyncEntityType.UserMetadataDeleteV1, () => {
|
||||
it('should delete and sync user metadata', async () => {
|
||||
const { auth, user, ctx } = await setup();
|
||||
|
||||
const userRepo = ctx.get(UserRepository);
|
||||
await userRepo.upsertMetadata(user.id, { key: UserMetadataKey.Onboarding, value: { isOnboarded: true } });
|
||||
|
||||
const response = await ctx.syncStream(auth, [SyncRequestType.UserMetadataV1]);
|
||||
expect(response).toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
key: UserMetadataKey.Onboarding,
|
||||
userId: user.id,
|
||||
value: { isOnboarded: true },
|
||||
},
|
||||
type: 'UserMetadataV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
|
||||
await ctx.syncAckAll(auth, response);
|
||||
|
||||
await userRepo.deleteMetadata(auth.user.id, UserMetadataKey.Onboarding);
|
||||
|
||||
await expect(ctx.syncStream(auth, [SyncRequestType.UserMetadataV1])).resolves.toEqual([
|
||||
{
|
||||
ack: expect.any(String),
|
||||
data: {
|
||||
userId: user.id,
|
||||
key: UserMetadataKey.Onboarding,
|
||||
},
|
||||
type: 'UserMetadataDeleteV1',
|
||||
},
|
||||
expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
|
||||
]);
|
||||
});
|
||||
});
|
||||
137
server/test/medium/specs/sync/sync-user.spec.ts
Normal file
137
server/test/medium/specs/sync/sync-user.spec.ts
Normal file
|
|
@ -0,0 +1,137 @@
|
|||
import { Kysely } from 'kysely';
|
||||
import { SyncEntityType, SyncRequestType } from 'src/enum';
|
||||
import { UserRepository } from 'src/repositories/user.repository';
|
||||
import { DB } from 'src/schema';
|
||||
import { SyncTestContext } from 'test/medium.factory';
|
||||
import { getKyselyDB } from 'test/utils';
|
||||
|
||||
// Database handle shared across tests in this file; assigned once in beforeAll.
let defaultDatabase: Kysely<DB>;
|
||||
|
||||
const setup = async (db?: Kysely<DB>) => {
|
||||
const ctx = new SyncTestContext(db || defaultDatabase);
|
||||
const { auth, user, session } = await ctx.newSyncAuthUser();
|
||||
return { auth, user, session, ctx };
|
||||
};
|
||||
|
||||
// Provision the shared Kysely database connection once for the whole suite.
beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});
|
||||
|
||||
describe(SyncEntityType.UserV1, () => {
  it('should detect and sync the first user', async () => {
    // Each test here passes a fresh database to setup — presumably because
    // user sync streams every user row, so shared state would leak between
    // tests; confirm against SyncTestContext.
    const { auth, ctx } = await setup(await getKyselyDB());

    const userRepo = ctx.get(UserRepository);
    const user = await userRepo.get(auth.user.id, { withDeleted: false });
    if (!user) {
      expect.fail('First user should exist');
    }

    // The authenticated user itself is streamed as a full UserV1 payload.
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          deletedAt: user.deletedAt,
          email: user.email,
          hasProfileImage: user.profileImagePath !== '',
          id: user.id,
          name: user.name,
          avatarColor: user.avatarColor,
          profileChangedAt: user.profileChangedAt.toISOString(),
        },
        type: 'UserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });

  it('should detect and sync a soft deleted user', async () => {
    const { auth, ctx } = await setup(await getKyselyDB());

    const { user: deleted } = await ctx.newUser({ deletedAt: new Date().toISOString() });

    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);

    // Soft-deleted users are still delivered as UserV1 (not as a delete
    // tombstone); arrayContaining is used because the relative order of the
    // two user payloads is not asserted.
    expect(response).toEqual(
      expect.arrayContaining([
        {
          ack: expect.any(String),
          data: expect.objectContaining({ id: auth.user.id }),
          type: 'UserV1',
        },
        {
          ack: expect.any(String),
          data: expect.objectContaining({ id: deleted.id }),
          type: 'UserV1',
        },
        expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
      ]),
    );

    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });

  it('should detect and sync a deleted user', async () => {
    const { auth, user: authUser, ctx } = await setup(await getKyselyDB());

    const userRepo = ctx.get(UserRepository);

    // Hard-delete a second user (second argument presumably forces the hard
    // delete — confirm against UserRepository.delete).
    const { user } = await ctx.newUser();
    await userRepo.delete({ id: user.id }, true);

    // The hard delete yields a UserDeleteV1 tombstone, followed by the
    // authenticated user's own UserV1 payload.
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: {
          userId: user.id,
        },
        type: 'UserDeleteV1',
      },
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: authUser.id }),
        type: 'UserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, response);
    await ctx.assertSyncIsComplete(auth, [SyncRequestType.UsersV1]);
  });

  it('should sync a user and then an update to that same user', async () => {
    const { auth, user, ctx } = await setup(await getKyselyDB());

    const userRepo = ctx.get(UserRepository);

    // First sync delivers the user; ack it so only changes are re-sent.
    const response = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(response).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: user.id }),
        type: 'UserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);

    await ctx.syncAckAll(auth, response);

    const updated = await userRepo.update(auth.user.id, { name: 'new name' });

    // The rename is re-delivered as a fresh UserV1 payload.
    const newResponse = await ctx.syncStream(auth, [SyncRequestType.UsersV1]);
    expect(newResponse).toEqual([
      {
        ack: expect.any(String),
        data: expect.objectContaining({ id: user.id, name: updated.name }),
        type: 'UserV1',
      },
      expect.objectContaining({ type: SyncEntityType.SyncCompleteV1 }),
    ]);
  });
});
|
||||
Loading…
Add table
Add a link
Reference in a new issue