mirror of
https://github.com/docmost/docmost.git
synced 2026-05-07 14:43:06 +08:00
Base WIP
This commit is contained in:
@@ -15,4 +15,25 @@ export enum EventName {
|
||||
WORKSPACE_CREATED = 'workspace.created',
|
||||
WORKSPACE_UPDATED = 'workspace.updated',
|
||||
WORKSPACE_DELETED = 'workspace.deleted',
|
||||
|
||||
BASE_CREATED = 'base.created',
|
||||
BASE_UPDATED = 'base.updated',
|
||||
BASE_DELETED = 'base.deleted',
|
||||
|
||||
BASE_ROW_CREATED = 'base.row.created',
|
||||
BASE_ROW_UPDATED = 'base.row.updated',
|
||||
BASE_ROW_DELETED = 'base.row.deleted',
|
||||
BASE_ROW_RESTORED = 'base.row.restored',
|
||||
BASE_ROW_REORDERED = 'base.row.reordered',
|
||||
|
||||
BASE_PROPERTY_CREATED = 'base.property.created',
|
||||
BASE_PROPERTY_UPDATED = 'base.property.updated',
|
||||
BASE_PROPERTY_DELETED = 'base.property.deleted',
|
||||
BASE_PROPERTY_REORDERED = 'base.property.reordered',
|
||||
|
||||
BASE_VIEW_CREATED = 'base.view.created',
|
||||
BASE_VIEW_UPDATED = 'base.view.updated',
|
||||
BASE_VIEW_DELETED = 'base.view.deleted',
|
||||
|
||||
BASE_SCHEMA_BUMPED = 'base.schema.bumped',
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { Module } from '@nestjs/common';
|
||||
import { BullModule } from '@nestjs/bullmq';
|
||||
import { BaseController } from './controllers/base.controller';
|
||||
import { BasePropertyController } from './controllers/base-property.controller';
|
||||
import { BaseRowController } from './controllers/base-row.controller';
|
||||
@@ -7,15 +8,37 @@ import { BaseService } from './services/base.service';
|
||||
import { BasePropertyService } from './services/base-property.service';
|
||||
import { BaseRowService } from './services/base-row.service';
|
||||
import { BaseViewService } from './services/base-view.service';
|
||||
import { BaseQueueProcessor } from './processors/base-queue.processor';
|
||||
import { BaseWsService } from './realtime/base-ws.service';
|
||||
import { BaseWsConsumers } from './realtime/base-ws-consumers';
|
||||
import { BasePresenceService } from './realtime/base-presence.service';
|
||||
import { QueueName } from '../../integrations/queue/constants';
|
||||
|
||||
@Module({
|
||||
imports: [BullModule.registerQueue({ name: QueueName.BASE_QUEUE })],
|
||||
controllers: [
|
||||
BaseController,
|
||||
BasePropertyController,
|
||||
BaseRowController,
|
||||
BaseViewController,
|
||||
],
|
||||
providers: [BaseService, BasePropertyService, BaseRowService, BaseViewService],
|
||||
exports: [BaseService, BasePropertyService, BaseRowService, BaseViewService],
|
||||
providers: [
|
||||
BaseService,
|
||||
BasePropertyService,
|
||||
BaseRowService,
|
||||
BaseViewService,
|
||||
BaseQueueProcessor,
|
||||
BasePresenceService,
|
||||
BaseWsService,
|
||||
BaseWsConsumers,
|
||||
],
|
||||
exports: [
|
||||
BaseService,
|
||||
BasePropertyService,
|
||||
BaseRowService,
|
||||
BaseViewService,
|
||||
BaseWsService,
|
||||
BasePresenceService,
|
||||
],
|
||||
})
|
||||
export class BaseModule {}
|
||||
|
||||
@@ -33,7 +33,7 @@ export type BasePropertyTypeValue =
|
||||
export const BASE_PROPERTY_TYPES = Object.values(BasePropertyType);
|
||||
|
||||
export const choiceSchema = z.object({
|
||||
id: z.string().uuid(),
|
||||
id: z.uuid(),
|
||||
name: z.string().min(1),
|
||||
color: z.string(),
|
||||
category: z.enum(['todo', 'inProgress', 'complete']).optional(),
|
||||
@@ -42,10 +42,10 @@ export const choiceSchema = z.object({
|
||||
export const selectTypeOptionsSchema = z
|
||||
.object({
|
||||
choices: z.array(choiceSchema).default([]),
|
||||
choiceOrder: z.array(z.string().uuid()).default([]),
|
||||
choiceOrder: z.array(z.uuid()).default([]),
|
||||
disableColors: z.boolean().optional(),
|
||||
defaultValue: z
|
||||
.union([z.string().uuid(), z.array(z.string().uuid())])
|
||||
.union([z.uuid(), z.array(z.uuid())])
|
||||
.nullable()
|
||||
.optional(),
|
||||
})
|
||||
@@ -147,21 +147,21 @@ export function parseTypeOptions(
|
||||
const cellValueSchemaMap: Partial<Record<BasePropertyTypeValue, z.ZodType>> = {
|
||||
[BasePropertyType.TEXT]: z.string(),
|
||||
[BasePropertyType.NUMBER]: z.number(),
|
||||
[BasePropertyType.SELECT]: z.string().uuid(),
|
||||
[BasePropertyType.STATUS]: z.string().uuid(),
|
||||
[BasePropertyType.MULTI_SELECT]: z.array(z.string().uuid()),
|
||||
[BasePropertyType.SELECT]: z.uuid(),
|
||||
[BasePropertyType.STATUS]: z.uuid(),
|
||||
[BasePropertyType.MULTI_SELECT]: z.array(z.uuid()),
|
||||
[BasePropertyType.DATE]: z.string(),
|
||||
[BasePropertyType.PERSON]: z.union([z.string().uuid(), z.array(z.string().uuid())]),
|
||||
[BasePropertyType.PERSON]: z.union([z.uuid(), z.array(z.uuid())]),
|
||||
[BasePropertyType.FILE]: z.array(z.object({
|
||||
id: z.string().uuid(),
|
||||
id: z.uuid(),
|
||||
fileName: z.string(),
|
||||
mimeType: z.string().optional(),
|
||||
fileSize: z.number().optional(),
|
||||
filePath: z.string().optional(),
|
||||
})),
|
||||
[BasePropertyType.CHECKBOX]: z.boolean(),
|
||||
[BasePropertyType.URL]: z.string().url(),
|
||||
[BasePropertyType.EMAIL]: z.string().email(),
|
||||
[BasePropertyType.URL]: z.url(),
|
||||
[BasePropertyType.EMAIL]: z.email(),
|
||||
};
|
||||
|
||||
export function getCellValueSchema(
|
||||
@@ -181,15 +181,83 @@ export function validateCellValue(
|
||||
return schema.safeParse(value);
|
||||
}
|
||||
|
||||
/*
|
||||
* Resolution context for conversions where the source type stores IDs
|
||||
* (select / multiSelect: choice uuid; person: user uuid; file: attachment
|
||||
* uuid). Callers must always supply this — the only invoker is the
|
||||
* `BASE_TYPE_CONVERSION` BullMQ worker, which builds the context per
|
||||
* chunk of rows (see `tasks/base-type-conversion.task.ts`).
|
||||
*/
|
||||
export type CellConversionContext = {
|
||||
fromTypeOptions?: unknown;
|
||||
userNames?: Map<string, string>;
|
||||
attachmentNames?: Map<string, string>;
|
||||
};
|
||||
|
||||
function resolveChoiceName(
|
||||
typeOptions: unknown,
|
||||
id: unknown,
|
||||
): string | undefined {
|
||||
if (!typeOptions || typeof typeOptions !== 'object') return undefined;
|
||||
const choices = (typeOptions as any).choices;
|
||||
if (!Array.isArray(choices)) return undefined;
|
||||
const match = choices.find((c: any) => c?.id === String(id));
|
||||
return typeof match?.name === 'string' ? match.name : undefined;
|
||||
}
|
||||
|
||||
export function attemptCellConversion(
|
||||
fromType: BasePropertyTypeValue,
|
||||
toType: BasePropertyTypeValue,
|
||||
value: unknown,
|
||||
ctx: CellConversionContext,
|
||||
): { converted: boolean; value: unknown } {
|
||||
if (value === null || value === undefined) {
|
||||
return { converted: true, value: null };
|
||||
}
|
||||
|
||||
// Resolve IDs to display strings before any direct parse. `select → text`
|
||||
// and `multiSelect → text` would otherwise short-circuit on z.string()
|
||||
// parsing the UUID itself and return the raw UUID instead of the name.
|
||||
if (toType === BasePropertyType.TEXT) {
|
||||
if (
|
||||
fromType === BasePropertyType.SELECT ||
|
||||
fromType === BasePropertyType.STATUS
|
||||
) {
|
||||
const name = resolveChoiceName(ctx.fromTypeOptions, value);
|
||||
return { converted: true, value: name ?? '' };
|
||||
}
|
||||
if (fromType === BasePropertyType.MULTI_SELECT && Array.isArray(value)) {
|
||||
const parts = value
|
||||
.map((v) => resolveChoiceName(ctx.fromTypeOptions, v))
|
||||
.filter((v): v is string => typeof v === 'string' && v.length > 0);
|
||||
return { converted: true, value: parts.join(', ') };
|
||||
}
|
||||
if (fromType === BasePropertyType.PERSON && ctx.userNames) {
|
||||
const ids = Array.isArray(value) ? value : [value];
|
||||
const parts = ids
|
||||
.map((v) => ctx.userNames!.get(String(v)))
|
||||
.filter((v): v is string => typeof v === 'string' && v.length > 0);
|
||||
return { converted: true, value: parts.join(', ') };
|
||||
}
|
||||
if (fromType === BasePropertyType.FILE && Array.isArray(value)) {
|
||||
const parts = value
|
||||
.map((f: any) => {
|
||||
if (f && typeof f === 'object') {
|
||||
if (typeof f.fileName === 'string') return f.fileName;
|
||||
if (typeof f.id === 'string' && ctx.attachmentNames) {
|
||||
return ctx.attachmentNames.get(f.id);
|
||||
}
|
||||
}
|
||||
if (typeof f === 'string' && ctx.attachmentNames) {
|
||||
return ctx.attachmentNames.get(f);
|
||||
}
|
||||
return undefined;
|
||||
})
|
||||
.filter((v): v is string => typeof v === 'string' && v.length > 0);
|
||||
return { converted: true, value: parts.join(', ') };
|
||||
}
|
||||
}
|
||||
|
||||
const targetSchema = cellValueSchemaMap[toType];
|
||||
if (!targetSchema) {
|
||||
return { converted: false, value: null };
|
||||
@@ -247,35 +315,66 @@ export function attemptCellConversion(
|
||||
}
|
||||
|
||||
export const viewSortSchema = z.object({
|
||||
propertyId: z.string().uuid(),
|
||||
propertyId: z.uuid(),
|
||||
direction: z.enum(['asc', 'desc']),
|
||||
});
|
||||
|
||||
export const viewFilterSchema = z.object({
|
||||
propertyId: z.string().uuid(),
|
||||
operator: z.enum([
|
||||
'equals',
|
||||
'notEquals',
|
||||
/*
|
||||
* View-stored filter shape matches the engine's predicate tree (see
|
||||
* `core/base/engine/schema.zod.ts`). No legacy flat-array / operator-name
|
||||
* variants are accepted — stored view configs use `op` (eq / neq / gt /
|
||||
* lt / contains / ncontains / ...) and nested and/or groups.
|
||||
*/
|
||||
const viewFilterConditionSchema = z.object({
|
||||
propertyId: z.uuid(),
|
||||
op: z.enum([
|
||||
'eq',
|
||||
'neq',
|
||||
'gt',
|
||||
'gte',
|
||||
'lt',
|
||||
'lte',
|
||||
'contains',
|
||||
'notContains',
|
||||
'ncontains',
|
||||
'startsWith',
|
||||
'endsWith',
|
||||
'isEmpty',
|
||||
'isNotEmpty',
|
||||
'greaterThan',
|
||||
'lessThan',
|
||||
'before',
|
||||
'after',
|
||||
'onOrBefore',
|
||||
'onOrAfter',
|
||||
'any',
|
||||
'none',
|
||||
'all',
|
||||
]),
|
||||
value: z.unknown().optional(),
|
||||
});
|
||||
|
||||
// Leaf node of the stored view filter tree (see viewFilterConditionSchema).
type ViewFilterCondition = z.infer<typeof viewFilterConditionSchema>;

// Recursive group node: and/or over conditions and nested groups. Declared
// as an explicit type (not z.infer) because the schema below is recursive,
// and z.lazy requires a manual type annotation to typecheck.
type ViewFilterGroup = {
  op: 'and' | 'or';
  children: Array<ViewFilterCondition | ViewFilterGroup>;
};

// A node is either a leaf condition or a nested group; z.lazy defers
// evaluation so the mutually-recursive schemas can reference each other.
const viewFilterNodeSchema: z.ZodType<ViewFilterCondition | ViewFilterGroup> =
  z.lazy(() => z.union([viewFilterConditionSchema, viewFilterGroupSchema]));

const viewFilterGroupSchema: z.ZodType<ViewFilterGroup> = z.lazy(() =>
  z.object({
    op: z.enum(['and', 'or']),
    children: z.array(viewFilterNodeSchema),
  }),
);
|
||||
|
||||
export const viewConfigSchema = z
|
||||
.object({
|
||||
sorts: z.array(viewSortSchema).optional(),
|
||||
filters: z.array(viewFilterSchema).optional(),
|
||||
visiblePropertyIds: z.array(z.string().uuid()).optional(),
|
||||
hiddenPropertyIds: z.array(z.string().uuid()).optional(),
|
||||
filter: viewFilterGroupSchema.optional(),
|
||||
visiblePropertyIds: z.array(z.uuid()).optional(),
|
||||
hiddenPropertyIds: z.array(z.uuid()).optional(),
|
||||
propertyWidths: z.record(z.string(), z.number().positive()).optional(),
|
||||
propertyOrder: z.array(z.string().uuid()).optional(),
|
||||
propertyOrder: z.array(z.uuid()).optional(),
|
||||
})
|
||||
.passthrough();
|
||||
|
||||
|
||||
@@ -52,12 +52,16 @@ export class BasePropertyController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.basePropertyService.create(workspace.id, dto);
|
||||
return this.basePropertyService.create(workspace.id, dto, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('update')
|
||||
async update(@Body() dto: UpdatePropertyDto, @AuthUser() user: User) {
|
||||
async update(
|
||||
@Body() dto: UpdatePropertyDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -68,12 +72,16 @@ export class BasePropertyController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.basePropertyService.update(dto);
|
||||
return this.basePropertyService.update(dto, workspace.id, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('delete')
|
||||
async delete(@Body() dto: DeletePropertyDto, @AuthUser() user: User) {
|
||||
async delete(
|
||||
@Body() dto: DeletePropertyDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -84,12 +92,16 @@ export class BasePropertyController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.basePropertyService.delete(dto);
|
||||
await this.basePropertyService.delete(dto, workspace.id, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('reorder')
|
||||
async reorder(@Body() dto: ReorderPropertyDto, @AuthUser() user: User) {
|
||||
async reorder(
|
||||
@Body() dto: ReorderPropertyDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -100,6 +112,6 @@ export class BasePropertyController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.basePropertyService.reorder(dto);
|
||||
await this.basePropertyService.reorder(dto, workspace.id, user.id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,58 +60,10 @@ export class BaseRowController {
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('info')
|
||||
async getRow(@Body() dto: RowIdDto, @AuthUser() user: User) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Base)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.baseRowService.getRowInfo(dto.rowId, dto.baseId);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('update')
|
||||
async update(@Body() dto: UpdateRowDto, @AuthUser() user: User) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.baseRowService.update(dto, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('delete')
|
||||
async delete(@Body() dto: DeleteRowDto, @AuthUser() user: User) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.baseRowService.delete(dto.rowId, dto.baseId);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('list')
|
||||
async list(
|
||||
@Body() dto: ListRowsDto,
|
||||
@Body() pagination: PaginationOptions,
|
||||
async getRow(
|
||||
@Body() dto: RowIdDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
@@ -123,12 +75,16 @@ export class BaseRowController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.baseRowService.list(dto, pagination);
|
||||
return this.baseRowService.getRowInfo(dto.rowId, dto.baseId, workspace.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('reorder')
|
||||
async reorder(@Body() dto: ReorderRowDto, @AuthUser() user: User) {
|
||||
@Post('update')
|
||||
async update(
|
||||
@Body() dto: UpdateRowDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -139,6 +95,67 @@ export class BaseRowController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.baseRowService.reorder(dto);
|
||||
return this.baseRowService.update(dto, workspace.id, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('delete')
|
||||
async delete(
|
||||
@Body() dto: DeleteRowDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.baseRowService.delete(dto, workspace.id, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('list')
|
||||
async list(
|
||||
@Body() dto: ListRowsDto,
|
||||
@Body() pagination: PaginationOptions,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Base)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.baseRowService.list(dto, pagination, workspace.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('reorder')
|
||||
async reorder(
|
||||
@Body() dto: ReorderRowDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
}
|
||||
|
||||
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
|
||||
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.baseRowService.reorder(dto, workspace.id, user.id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,7 +54,11 @@ export class BaseViewController {
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('update')
|
||||
async update(@Body() dto: UpdateViewDto, @AuthUser() user: User) {
|
||||
async update(
|
||||
@Body() dto: UpdateViewDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -65,12 +69,16 @@ export class BaseViewController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.baseViewService.update(dto);
|
||||
return this.baseViewService.update(dto, workspace.id, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('delete')
|
||||
async delete(@Body() dto: DeleteViewDto, @AuthUser() user: User) {
|
||||
async delete(
|
||||
@Body() dto: DeleteViewDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -81,12 +89,16 @@ export class BaseViewController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
await this.baseViewService.delete(dto);
|
||||
await this.baseViewService.delete(dto, workspace.id, user.id);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('list')
|
||||
async list(@Body() dto: BaseIdDto, @AuthUser() user: User) {
|
||||
async list(
|
||||
@Body() dto: BaseIdDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const base = await this.baseRepo.findById(dto.baseId);
|
||||
if (!base) {
|
||||
throw new NotFoundException('Base not found');
|
||||
@@ -97,6 +109,6 @@ export class BaseViewController {
|
||||
throw new ForbiddenException();
|
||||
}
|
||||
|
||||
return this.baseViewService.listByBaseId(dto.baseId);
|
||||
return this.baseViewService.listByBaseId(dto.baseId, workspace.id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,12 @@ export class CreateRowDto {
|
||||
cells?: Record<string, unknown>;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
@IsUUID()
|
||||
afterRowId?: string;
|
||||
|
||||
// Echoed back in the socket event so the originating client can skip
|
||||
// replaying its own write.
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
@@ -27,6 +27,10 @@ export class UpdatePropertyDto {
|
||||
@IsOptional()
|
||||
@IsObject()
|
||||
typeOptions?: Record<string, unknown>;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
export class DeletePropertyDto {
|
||||
@@ -35,6 +39,10 @@ export class DeletePropertyDto {
|
||||
|
||||
@IsUUID()
|
||||
baseId: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
export class ReorderPropertyDto {
|
||||
@@ -47,4 +55,8 @@ export class ReorderPropertyDto {
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
position: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
@@ -1,5 +1,16 @@
|
||||
import { IsNotEmpty, IsObject, IsOptional, IsString, IsUUID, IsArray, ValidateNested } from 'class-validator';
|
||||
import {
|
||||
IsIn,
|
||||
IsNotEmpty,
|
||||
IsObject,
|
||||
IsOptional,
|
||||
IsString,
|
||||
IsUUID,
|
||||
IsArray,
|
||||
ValidateNested,
|
||||
} from 'class-validator';
|
||||
import { Type } from 'class-transformer';
|
||||
// `filter` / `search` shapes are validated by the engine's Zod schemas
|
||||
// at the service boundary (`core/base/engine/schema.zod.ts`).
|
||||
|
||||
export class UpdateRowDto {
|
||||
@IsUUID()
|
||||
@@ -10,6 +21,10 @@ export class UpdateRowDto {
|
||||
|
||||
@IsObject()
|
||||
cells: Record<string, unknown>;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
export class DeleteRowDto {
|
||||
@@ -18,6 +33,10 @@ export class DeleteRowDto {
|
||||
|
||||
@IsUUID()
|
||||
baseId: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
export class RowIdDto {
|
||||
@@ -28,25 +47,12 @@ export class RowIdDto {
|
||||
baseId: string;
|
||||
}
|
||||
|
||||
class FilterDto {
|
||||
@IsUUID()
|
||||
propertyId: string;
|
||||
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
operator: string;
|
||||
|
||||
@IsOptional()
|
||||
value?: unknown;
|
||||
}
|
||||
|
||||
class SortDto {
|
||||
@IsUUID()
|
||||
propertyId: string;
|
||||
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
direction: string;
|
||||
@IsIn(['asc', 'desc'])
|
||||
direction: 'asc' | 'desc';
|
||||
}
|
||||
|
||||
export class ListRowsDto {
|
||||
@@ -57,17 +63,22 @@ export class ListRowsDto {
|
||||
@IsUUID()
|
||||
viewId?: string;
|
||||
|
||||
// Compound filter tree. Shape validated by the engine's Zod schema at
|
||||
// the service boundary.
|
||||
@IsOptional()
|
||||
@IsArray()
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => FilterDto)
|
||||
filters?: FilterDto[];
|
||||
@IsObject()
|
||||
filter?: unknown;
|
||||
|
||||
@IsOptional()
|
||||
@IsArray()
|
||||
@ValidateNested({ each: true })
|
||||
@Type(() => SortDto)
|
||||
sorts?: SortDto[];
|
||||
|
||||
// `{ query, mode? }` — Zod-validated at the service boundary.
|
||||
@IsOptional()
|
||||
@IsObject()
|
||||
search?: unknown;
|
||||
}
|
||||
|
||||
export class ReorderRowDto {
|
||||
@@ -80,4 +91,8 @@ export class ReorderRowDto {
|
||||
@IsString()
|
||||
@IsNotEmpty()
|
||||
position: string;
|
||||
|
||||
@IsOptional()
|
||||
@IsString()
|
||||
requestId?: string;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,111 @@
|
||||
import { BadRequestException } from '@nestjs/common';
|
||||
import { SortBuild, TailKey } from './sort';
|
||||
|
||||
type ValueType = 'numeric' | 'date' | 'bool' | 'text';
|
||||
|
||||
// Hard cap on decoded cursor size so a tampered cursor can't force a large
|
||||
// JSON parse. Real cursors are <1KB (a handful of field values).
|
||||
const MAX_CURSOR_DECODED_BYTES = 4096;
|
||||
|
||||
/*
|
||||
* Null-safe cursor encoder. The previous encoder used a literal string
|
||||
* sentinel `__null__` for NULLs, which could collide with real cell
|
||||
* values. This encoder never sees NULL because sort expressions are
|
||||
* sentinel-wrapped (see sort.ts). It also represents ±Infinity
|
||||
* explicitly so JSON round-tripping is lossless.
|
||||
*/
|
||||
|
||||
export function makeCursor(sorts: SortBuild[], tailKeys: TailKey[]) {
|
||||
const types = new Map<string, ValueType>();
|
||||
for (const s of sorts) types.set(s.key, s.valueType);
|
||||
for (const k of tailKeys) types.set(k, 'text');
|
||||
|
||||
return {
|
||||
encodeCursor(values: Array<[string, unknown]>): string {
|
||||
const payload: Record<string, string> = {};
|
||||
for (const [k, v] of values) {
|
||||
payload[k] = encodeValue(v, types.get(k) ?? 'text');
|
||||
}
|
||||
return Buffer.from(JSON.stringify(payload), 'utf8').toString('base64url');
|
||||
},
|
||||
|
||||
decodeCursor(
|
||||
cursor: string,
|
||||
fieldNames: string[],
|
||||
): Record<string, string> {
|
||||
let parsed: Record<string, string>;
|
||||
try {
|
||||
parsed = JSON.parse(
|
||||
Buffer.from(cursor, 'base64url').toString('utf8'),
|
||||
);
|
||||
} catch {
|
||||
throw new BadRequestException('Invalid cursor');
|
||||
}
|
||||
if (typeof parsed !== 'object' || parsed === null) {
|
||||
throw new BadRequestException('Invalid cursor payload');
|
||||
}
|
||||
const out: Record<string, string> = {};
|
||||
for (const name of fieldNames) {
|
||||
if (!(name in parsed)) {
|
||||
throw new BadRequestException(`Cursor missing field: ${name}`);
|
||||
}
|
||||
out[name] = parsed[name];
|
||||
}
|
||||
return out;
|
||||
},
|
||||
|
||||
parseCursor(decoded: Record<string, string>): Record<string, unknown> {
|
||||
const out: Record<string, unknown> = {};
|
||||
for (const [k, raw] of Object.entries(decoded)) {
|
||||
out[k] = decodeValue(raw, types.get(k) ?? 'text');
|
||||
}
|
||||
return out;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function encodeValue(value: unknown, type: ValueType): string {
|
||||
if (type === 'numeric') {
|
||||
if (value === null || value === undefined) return '';
|
||||
const n = typeof value === 'number' ? value : parseFloat(String(value));
|
||||
if (n === Number.POSITIVE_INFINITY || String(value) === 'Infinity') {
|
||||
return 'inf';
|
||||
}
|
||||
if (n === Number.NEGATIVE_INFINITY || String(value) === '-Infinity') {
|
||||
return '-inf';
|
||||
}
|
||||
if (Number.isNaN(n)) return '';
|
||||
return String(n);
|
||||
}
|
||||
if (type === 'date') {
|
||||
if (value === null || value === undefined) return '';
|
||||
if (value instanceof Date) return value.toISOString();
|
||||
const s = String(value);
|
||||
if (s === 'infinity') return 'inf';
|
||||
if (s === '-infinity') return '-inf';
|
||||
return s;
|
||||
}
|
||||
if (type === 'bool') {
|
||||
return value ? '1' : '0';
|
||||
}
|
||||
return value == null ? '' : String(value);
|
||||
}
|
||||
|
||||
function decodeValue(raw: string, type: ValueType): unknown {
|
||||
if (type === 'numeric') {
|
||||
if (raw === 'inf') return Number.POSITIVE_INFINITY;
|
||||
if (raw === '-inf') return Number.NEGATIVE_INFINITY;
|
||||
if (raw === '') return null;
|
||||
return parseFloat(raw);
|
||||
}
|
||||
if (type === 'date') {
|
||||
if (raw === 'inf') return 'infinity';
|
||||
if (raw === '-inf') return '-infinity';
|
||||
if (raw === '') return null;
|
||||
return raw;
|
||||
}
|
||||
if (type === 'bool') {
|
||||
return raw === '1';
|
||||
}
|
||||
return raw;
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
import { SelectQueryBuilder } from 'kysely';
|
||||
import { DB } from '@docmost/db/types/db';
|
||||
import { BaseRow } from '@docmost/db/types/entity.types';
|
||||
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
|
||||
import {
|
||||
CursorPaginationResult,
|
||||
executeWithCursorPagination,
|
||||
} from '@docmost/db/pagination/cursor-pagination';
|
||||
import { FilterNode, SearchSpec, SortSpec } from './schema.zod';
|
||||
import { buildWhere, PropertySchema } from './predicate';
|
||||
import { buildSorts, CURSOR_TAIL_KEYS, SortBuild } from './sort';
|
||||
import { buildSearch } from './search';
|
||||
import { makeCursor } from './cursor';
|
||||
|
||||
/** Inputs for one list query; `schema` is the property map consulted by
 *  buildWhere/buildSorts when compiling filter and sort clauses. */
export type EngineListOpts = {
  filter?: FilterNode;
  sorts?: SortSpec[];
  search?: SearchSpec;
  schema: PropertySchema;
  pagination: PaginationOptions;
};

/*
 * Top-level orchestrator. Callers (repos, services) provide a base
 * Kysely query already scoped to the target base + workspace + alive
 * rows; this adds search/filter/sort clauses and runs cursor pagination.
 */
export async function runListQuery(
  base: SelectQueryBuilder<DB, 'baseRows', any>,
  opts: EngineListOpts,
): Promise<CursorPaginationResult<BaseRow>> {
  let qb = base;

  // Copy the optional fields into locals before the where() callbacks so
  // the closures capture values already checked to be defined.
  if (opts.search) {
    const spec = opts.search;
    qb = qb.where((eb) => buildSearch(eb, spec));
  }

  if (opts.filter) {
    const filter = opts.filter;
    qb = qb.where((eb) => buildWhere(eb, filter, opts.schema));
  }

  const sortBuilds: SortBuild[] =
    opts.sorts && opts.sorts.length > 0
      ? buildSorts(opts.sorts, opts.schema)
      : [];

  // Each sort expression is also selected under its cursor key so the
  // paginator can read the sort value off the returned rows.
  for (const sb of sortBuilds) {
    qb = qb.select(sb.expression.as(sb.key)) as SelectQueryBuilder<
      DB,
      'baseRows',
      any
    >;
  }

  const cursor = makeCursor(sortBuilds, CURSOR_TAIL_KEYS);

  // Ordering: user sorts first, then position and id as deterministic
  // ascending tie-breakers so pagination is stable.
  const fields = [
    ...sortBuilds.map((sb) => ({
      expression: sb.expression,
      direction: sb.direction,
      key: sb.key,
    })),
    {
      expression: 'position' as const,
      direction: 'asc' as const,
      key: 'position' as const,
    },
    {
      expression: 'id' as const,
      direction: 'asc' as const,
      key: 'id' as const,
    },
  ];

  // NOTE(review): the `as any` casts bridge the dynamically-selected sort
  // columns past executeWithCursorPagination's static typing — verify the
  // helper's expected field/cursor shapes if its signature changes.
  return executeWithCursorPagination(qb as any, {
    perPage: opts.pagination.limit,
    cursor: opts.pagination.cursor,
    beforeCursor: opts.pagination.beforeCursor,
    fields: fields as any,
    encodeCursor: cursor.encodeCursor as any,
    decodeCursor: cursor.decodeCursor as any,
    parseCursor: cursor.parseCursor as any,
  }) as unknown as Promise<CursorPaginationResult<BaseRow>>;
}
|
||||
@@ -0,0 +1,32 @@
|
||||
import { sql, RawBuilder } from 'kysely';
|
||||
|
||||
/*
|
||||
* Parameterised extractors wrapping the SQL helper functions installed
|
||||
* by the bases-hardening migration. PropertyId always binds as a
|
||||
* parameter — never string-interpolated. These replace every
|
||||
* `sql.raw('cells->>...')` site in the old repo.
|
||||
*/
|
||||
|
||||
// Text extractor: base_cell_text(cells, propertyId). propertyId binds as a
// SQL parameter via the tagged template — never string-interpolated.
export function textCell(propertyId: string): RawBuilder<string> {
  return sql<string>`base_cell_text(cells, ${propertyId}::uuid)`;
}

// Numeric extractor over the same cells jsonb column.
export function numericCell(propertyId: string): RawBuilder<number> {
  return sql<number>`base_cell_numeric(cells, ${propertyId}::uuid)`;
}

// Timestamptz extractor; the SQL helper handles parsing/casting.
export function dateCell(propertyId: string): RawBuilder<Date> {
  return sql<Date>`base_cell_timestamptz(cells, ${propertyId}::uuid)`;
}

// Boolean extractor.
export function boolCell(propertyId: string): RawBuilder<boolean> {
  return sql<boolean>`base_cell_bool(cells, ${propertyId}::uuid)`;
}

// Array extractor; element shape depends on the property type, hence unknown.
export function arrayCell(propertyId: string): RawBuilder<unknown> {
  return sql<unknown>`base_cell_array(cells, ${propertyId}::uuid)`;
}
|
||||
|
||||
export function escapeIlike(value: string): string {
|
||||
return value.replace(/[%_\\]/g, '\\$&');
|
||||
}
|
||||
@@ -0,0 +1,44 @@
|
||||
// Barrel for the base list-query engine. Consumers import from this
// index rather than the individual modules.

// Request validation: Zod schemas, size limits, and the filter-tree guard.
export {
  MAX_FILTER_DEPTH,
  MAX_FILTER_NODES,
  MAX_SORTS,
  conditionSchema,
  filterGroupSchema,
  filterNodeSchema,
  listQuerySchema,
  operatorSchema,
  searchSchema,
  sortSpecSchema,
  sortsSchema,
  validateFilterTree,
} from './schema.zod';
export type {
  Condition,
  FilterGroup,
  FilterNode,
  ListQuery,
  Operator,
  SearchSpec,
  SortSpec,
} from './schema.zod';

// Property-type → query-kind classification and system-column mapping.
export {
  PropertyKind,
  SYSTEM_COLUMN,
  isSystemType,
  propertyKind,
} from './kinds';
export type { PropertyKindValue } from './kinds';

// Filter tree → SQL predicate compiler.
export { buildWhere } from './predicate';
export type { PropertySchema } from './predicate';

// Sort expression builder (sentinel-wrapped, keyset-pagination friendly).
export { buildSorts, CURSOR_TAIL_KEYS } from './sort';
export type { SortBuild, TailKey } from './sort';

// Cursor encode/decode for keyset pagination.
export { makeCursor } from './cursor';

// Full-text / trigram search predicate.
export { buildSearch } from './search';

// Top-level entry point combining filter + sort + search + pagination.
export { runListQuery } from './engine';
export type { EngineListOpts } from './engine';
|
||||
@@ -0,0 +1,57 @@
|
||||
import { BasePropertyType } from '../base.schemas';
|
||||
|
||||
// Storage/query "kind" buckets. Many property types share one SQL
// extraction strategy (e.g. text/url/email are all TEXT), so predicates
// and sorts dispatch on kind rather than on the raw property type.
export const PropertyKind = {
  TEXT: 'text',
  NUMERIC: 'numeric',
  DATE: 'date',
  BOOL: 'bool',
  SELECT: 'select',
  MULTI: 'multi',
  PERSON: 'person',
  FILE: 'file',
  // System user column (e.g. last edited by) — lives on base_rows, not in cells.
  SYS_USER: 'sys_user',
} as const;

// Union of the literal kind strings above.
export type PropertyKindValue = (typeof PropertyKind)[keyof typeof PropertyKind];
|
||||
|
||||
export function propertyKind(type: string): PropertyKindValue | null {
|
||||
switch (type) {
|
||||
case BasePropertyType.TEXT:
|
||||
case BasePropertyType.URL:
|
||||
case BasePropertyType.EMAIL:
|
||||
return PropertyKind.TEXT;
|
||||
case BasePropertyType.NUMBER:
|
||||
return PropertyKind.NUMERIC;
|
||||
case BasePropertyType.DATE:
|
||||
case BasePropertyType.CREATED_AT:
|
||||
case BasePropertyType.LAST_EDITED_AT:
|
||||
return PropertyKind.DATE;
|
||||
case BasePropertyType.CHECKBOX:
|
||||
return PropertyKind.BOOL;
|
||||
case BasePropertyType.SELECT:
|
||||
case BasePropertyType.STATUS:
|
||||
return PropertyKind.SELECT;
|
||||
case BasePropertyType.MULTI_SELECT:
|
||||
return PropertyKind.MULTI;
|
||||
case BasePropertyType.PERSON:
|
||||
return PropertyKind.PERSON;
|
||||
case BasePropertyType.FILE:
|
||||
return PropertyKind.FILE;
|
||||
case BasePropertyType.LAST_EDITED_BY:
|
||||
return PropertyKind.SYS_USER;
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// System property type → camelCase column name on `base_rows`.
|
||||
// Kysely camel-case plugin maps to snake_case in SQL.
|
||||
export const SYSTEM_COLUMN: Record<string, 'createdAt' | 'updatedAt' | 'lastUpdatedById'> = {
|
||||
[BasePropertyType.CREATED_AT]: 'createdAt',
|
||||
[BasePropertyType.LAST_EDITED_AT]: 'updatedAt',
|
||||
[BasePropertyType.LAST_EDITED_BY]: 'lastUpdatedById',
|
||||
};
|
||||
|
||||
export function isSystemType(type: string): boolean {
|
||||
return type in SYSTEM_COLUMN;
|
||||
}
|
||||
@@ -0,0 +1,404 @@
|
||||
import { Expression, ExpressionBuilder, sql, SqlBool } from 'kysely';
|
||||
import { DB } from '@docmost/db/types/db';
|
||||
import { BaseProperty } from '@docmost/db/types/entity.types';
|
||||
import { Condition, FilterNode } from './schema.zod';
|
||||
import { PropertyKind, propertyKind, SYSTEM_COLUMN } from './kinds';
|
||||
import {
|
||||
arrayCell,
|
||||
boolCell,
|
||||
dateCell,
|
||||
escapeIlike,
|
||||
numericCell,
|
||||
textCell,
|
||||
} from './extractors';
|
||||
|
||||
// propertyId → minimal property record, used by the predicate/sort
// builders to resolve a condition's type and options without re-querying.
export type PropertySchema = Map<
  string,
  Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>
>;

// Shorthand for the expression builder scoped to the baseRows table.
type Eb = ExpressionBuilder<DB, 'baseRows'>;

// Constant boolean expressions. FALSE is the fail-closed result for any
// condition that cannot be compiled (unknown property, bad value, etc.).
const TRUE = sql<SqlBool>`TRUE`;
const FALSE = sql<SqlBool>`FALSE`;
|
||||
|
||||
export function buildWhere(
|
||||
eb: Eb,
|
||||
node: FilterNode,
|
||||
schema: PropertySchema,
|
||||
): Expression<SqlBool> {
|
||||
if ('children' in node) {
|
||||
if (node.children.length === 0) return TRUE;
|
||||
const built = node.children.map((c) => buildWhere(eb, c, schema));
|
||||
return node.op === 'and' ? eb.and(built) : eb.or(built);
|
||||
}
|
||||
return buildCondition(eb, node, schema);
|
||||
}
|
||||
|
||||
/**
 * Compile one leaf condition into SQL.
 *
 * Fail-closed design: an unknown propertyId, or a property type with no
 * kind mapping, compiles to FALSE rather than throwing — a stale client
 * filter can never widen the result set.
 */
function buildCondition(
  eb: Eb,
  cond: Condition,
  schema: PropertySchema,
): Expression<SqlBool> {
  const prop = schema.get(cond.propertyId);
  if (!prop) return FALSE;

  // System types (createdAt/updatedAt/lastEditedBy) live in real columns,
  // not in the cells jsonb — route them to the column-based handler.
  // NOTE(review): plain index access walks the prototype chain; a type
  // string like 'toString' would yield a truthy non-column value here —
  // consider an own-property check.
  const sysCol = SYSTEM_COLUMN[prop.type];
  if (sysCol) return systemCondition(eb, sysCol, prop.type, cond);

  const kind = propertyKind(prop.type);
  if (!kind) return FALSE;

  // SYS_USER never reaches this switch: its only type (lastEditedBy) is
  // handled by the SYSTEM_COLUMN branch above, so it falls to default.
  switch (kind) {
    case PropertyKind.TEXT:
      return textCondition(eb, cond);
    case PropertyKind.NUMERIC:
      return numericCondition(eb, cond);
    case PropertyKind.DATE:
      return dateCondition(eb, cond);
    case PropertyKind.BOOL:
      return boolCondition(eb, cond);
    case PropertyKind.SELECT:
      return selectCondition(eb, cond);
    case PropertyKind.MULTI:
      return multiCondition(eb, cond);
    case PropertyKind.PERSON:
      return personCondition(eb, cond, prop);
    case PropertyKind.FILE:
      return arrayOfIdsCondition(eb, cond);
    default:
      return FALSE;
  }
}
|
||||
|
||||
// --- per-kind handlers ------------------------------------------------

/**
 * Text (text/url/email) conditions.
 *
 * NULL handling is deliberate three-valued-logic hygiene: 'neq' and
 * 'ncontains' include an `IS NULL` arm so empty cells count as
 * "not equal" / "not containing", which plain SQL `!=` would exclude.
 * User input in pattern operators goes through escapeIlike so `%`/`_`
 * can't act as wildcards.
 */
function textCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      // Empty means either no cell (NULL) or an explicit empty string.
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'contains':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `%${escapeIlike(String(val))}%`);
    case 'ncontains':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, 'not ilike', `%${escapeIlike(String(val))}%`),
          ]);
    case 'startsWith':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `${escapeIlike(String(val))}%`);
    case 'endsWith':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `%${escapeIlike(String(val))}`);
    default:
      return FALSE;
  }
}

/**
 * Numeric conditions. The value is coerced with Number(); NaN (and null)
 * fail closed so a malformed filter value can never match rows.
 */
function numericCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = numericCell(cond.propertyId);
  const raw = cond.value;
  const num = raw == null ? null : Number(raw);
  const bad = num == null || Number.isNaN(num);
  switch (cond.op) {
    case 'isEmpty':
      return eb(expr as any, 'is', null);
    case 'isNotEmpty':
      return eb(expr as any, 'is not', null);
    case 'eq':
      return bad ? FALSE : eb(expr as any, '=', num);
    case 'neq':
      // NULL cells count as "not equal" (see textCondition rationale).
      return bad
        ? FALSE
        : eb.or([eb(expr as any, 'is', null), eb(expr as any, '!=', num)]);
    case 'gt':
      return bad ? FALSE : eb(expr as any, '>', num);
    case 'gte':
      return bad ? FALSE : eb(expr as any, '>=', num);
    case 'lt':
      return bad ? FALSE : eb(expr as any, '<', num);
    case 'lte':
      return bad ? FALSE : eb(expr as any, '<=', num);
    default:
      return FALSE;
  }
}

/**
 * Date (date/createdAt/lastEditedAt stored in cells) conditions.
 * The value is compared as its string form against the timestamptz
 * extractor.
 * NOTE(review): relies on Postgres implicitly casting the bound text to
 * timestamptz — assumes values are ISO-8601 strings; confirm DTO shape.
 */
function dateCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = dateCell(cond.propertyId);
  const raw = cond.value;
  const bad = raw == null || raw === '';
  switch (cond.op) {
    case 'isEmpty':
      return eb(expr as any, 'is', null);
    case 'isNotEmpty':
      return eb(expr as any, 'is not', null);
    case 'eq':
      return bad ? FALSE : eb(expr as any, '=', String(raw));
    case 'neq':
      return bad
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(raw)),
          ]);
    case 'before':
      return bad ? FALSE : eb(expr as any, '<', String(raw));
    case 'after':
      return bad ? FALSE : eb(expr as any, '>', String(raw));
    case 'onOrBefore':
      return bad ? FALSE : eb(expr as any, '<=', String(raw));
    case 'onOrAfter':
      return bad ? FALSE : eb(expr as any, '>=', String(raw));
    default:
      return FALSE;
  }
}

/**
 * Checkbox conditions.
 * NOTE(review): Boolean(cond.value) treats the string 'false' as true —
 * assumes callers send real JSON booleans; confirm request validation
 * coerces this upstream.
 */
function boolCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = boolCell(cond.propertyId);
  switch (cond.op) {
    case 'isEmpty':
      return eb(expr as any, 'is', null);
    case 'isNotEmpty':
      return eb(expr as any, 'is not', null);
    case 'eq':
      return cond.value == null
        ? FALSE
        : eb(expr as any, '=', Boolean(cond.value));
    case 'neq':
      return cond.value == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', Boolean(cond.value)),
          ]);
    default:
      return FALSE;
  }
}
|
||||
|
||||
/**
 * Select/status conditions.
 *
 * Set-operator semantics: 'any' with an empty list matches nothing
 * (FALSE), while 'none' with an empty list matches everything (TRUE) —
 * vacuous truth. 'none' also includes the `IS NULL` arm so unset cells
 * satisfy it.
 */
function selectCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Cell stores a single option UUID as string. Use text extractor.
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return eb(expr as any, 'in', arr);
    }
    case 'none': {
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, 'not in', arr),
      ]);
    }
    default:
      return FALSE;
  }
}

/** Multi-select cells are jsonb arrays of option ids — shared handler. */
function multiCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  return arrayOfIdsCondition(eb, cond);
}

/**
 * Person conditions. Storage shape depends on the property's
 * `allowMultiple` option: an array of user ids (jsonb-array semantics) or
 * a single id string (select-style semantics).
 */
function personCondition(
  eb: Eb,
  cond: Condition,
  prop: Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>,
): Expression<SqlBool> {
  // Person cells may be stored as a single uuid or an array of uuids depending
  // on the property's `allowMultiple` option. Normalise to array semantics via
  // `base_cell_array` when it's stored as an array, else text.
  const allowMultiple = !!(prop.typeOptions as any)?.allowMultiple;
  if (allowMultiple) return arrayOfIdsCondition(eb, cond);

  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return eb(expr as any, 'in', arr);
    }
    // NOTE(review): unlike selectCondition, single-person has no 'none'
    // case, so 'none' falls through to FALSE — confirm this asymmetry is
    // intended.
    default:
      return FALSE;
  }
}
|
||||
|
||||
/**
 * Conditions over jsonb-array-of-id cells (multi-select, files,
 * multi-person). Uses jsonb operators: `?|` for overlap, `@>` for
 * containment, `jsonb_array_length` for emptiness.
 * NOTE(review): `${expr} ?| ${arr}` binds a JS array where `?|` expects
 * text[] — assumes the driver serialises it as a Postgres array; verify
 * against the pg/postgres.js binding in use.
 */
function arrayOfIdsCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = arrayCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      // Missing cell (NULL) or present-but-empty array.
      return eb.or([
        eb(expr as any, 'is', null),
        sql<SqlBool>`jsonb_array_length(${expr}) = 0`,
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        sql<SqlBool>`jsonb_array_length(${expr}) > 0`,
      ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return sql<SqlBool>`${expr} ?| ${arr}`;
    }
    case 'all': {
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      // `::text::jsonb` because postgres.js auto-detects JSON-shaped strings
      // as jsonb and re-encodes them, producing a jsonb *string* instead of
      // an array. Without the text hop, the containment check never matches.
      return sql<SqlBool>`${expr} @> ${JSON.stringify(arr)}::text::jsonb`;
    }
    case 'none': {
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      return eb.or([
        eb(expr as any, 'is', null),
        sql<SqlBool>`NOT (${expr} ?| ${arr})`,
      ]);
    }
    default:
      return FALSE;
  }
}

/**
 * Conditions over real `base_rows` columns (system properties).
 * Two regimes: lastEditedBy is a nullable uuid column with select-style
 * semantics; createdAt/updatedAt are NOT NULL timestamptz columns, so
 * isEmpty/isNotEmpty are constant FALSE/TRUE.
 */
function systemCondition(
  eb: Eb,
  column: 'createdAt' | 'updatedAt' | 'lastUpdatedById',
  propertyType: string,
  cond: Condition,
): Expression<SqlBool> {
  const ref = eb.ref(column);
  const val = cond.value;

  // lastEditedBy — UUID column; behaves like select (uuid equality, in, isEmpty).
  // NOTE(review): compares against the literal 'lastEditedBy' — assumes
  // BasePropertyType.LAST_EDITED_BY === 'lastEditedBy'; confirm and
  // prefer the enum constant.
  if (propertyType === 'lastEditedBy') {
    switch (cond.op) {
      case 'isEmpty':
        return eb(ref, 'is', null);
      case 'isNotEmpty':
        return eb(ref, 'is not', null);
      case 'eq':
        return val == null ? FALSE : eb(ref, '=', String(val));
      case 'neq':
        return val == null
          ? FALSE
          : eb.or([eb(ref, 'is', null), eb(ref, '!=', String(val))]);
      case 'any': {
        const arr = asStringArray(val);
        if (arr.length === 0) return FALSE;
        return eb(ref, 'in', arr);
      }
      case 'none': {
        const arr = asStringArray(val);
        if (arr.length === 0) return TRUE;
        return eb.or([eb(ref, 'is', null), eb(ref, 'not in', arr)]);
      }
      default:
        return FALSE;
    }
  }

  // createdAt / updatedAt — timestamptz columns (NOT NULL).
  const bad = val == null || val === '';
  switch (cond.op) {
    case 'isEmpty':
      return FALSE;
    case 'isNotEmpty':
      return TRUE;
    case 'eq':
      return bad ? FALSE : eb(ref, '=', String(val));
    case 'neq':
      // No IS NULL arm needed: the column is NOT NULL.
      return bad ? FALSE : eb(ref, '!=', String(val));
    case 'before':
      return bad ? FALSE : eb(ref, '<', String(val));
    case 'after':
      return bad ? FALSE : eb(ref, '>', String(val));
    case 'onOrBefore':
      return bad ? FALSE : eb(ref, '<=', String(val));
    case 'onOrAfter':
      return bad ? FALSE : eb(ref, '>=', String(val));
    default:
      return FALSE;
  }
}
|
||||
|
||||
// --- utilities --------------------------------------------------------
|
||||
|
||||
function asStringArray(val: unknown): string[] {
|
||||
if (val == null) return [];
|
||||
if (Array.isArray(val)) return val.filter((v) => v != null).map(String);
|
||||
return [String(val)];
|
||||
}
|
||||
|
||||
export { TRUE as TRUE_EXPR, FALSE as FALSE_EXPR };
|
||||
@@ -0,0 +1,100 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
// Hard caps on filter complexity, enforced by validateFilterTree before
// any SQL is built.
export const MAX_FILTER_DEPTH = 5;
export const MAX_FILTER_NODES = 50;
export const MAX_SORTS = 5;

// NOTE(review): `z.uuid()` is the Zod v4 top-level API (v3 spells it
// `z.string().uuid()`) — confirm the installed zod major version.
const uuid = z.uuid();

// Every operator accepted by any property kind; per-kind handlers treat
// inapplicable operators as non-matching (fail closed).
export const operatorSchema = z.enum([
  'eq',
  'neq',
  'gt',
  'gte',
  'lt',
  'lte',
  'contains',
  'ncontains',
  'startsWith',
  'endsWith',
  'isEmpty',
  'isNotEmpty',
  'before',
  'after',
  'onOrBefore',
  'onOrAfter',
  'any',
  'none',
  'all',
]);

export type Operator = z.infer<typeof operatorSchema>;

// A leaf filter condition. `value` stays unknown here; coercion and
// validation are per-property-kind at predicate-build time.
export const conditionSchema = z.object({
  propertyId: uuid,
  op: operatorSchema,
  value: z.unknown().optional(),
});

export type Condition = z.infer<typeof conditionSchema>;
|
||||
|
||||
// A filter tree node is either a leaf condition or an and/or group.
export type FilterNode = Condition | FilterGroup;
export type FilterGroup = {
  op: 'and' | 'or';
  children: FilterNode[];
};

// Recursive Zod schema for grouped filter trees. z.lazy defers
// resolution so the mutually-recursive union can reference itself; the
// explicit ZodType annotations are required because inference cannot
// close the cycle.
export const filterNodeSchema: z.ZodType<FilterNode> = z.lazy(() =>
  z.union([conditionSchema, filterGroupSchema]),
);

export const filterGroupSchema: z.ZodType<FilterGroup> = z.lazy(() =>
  z.object({
    op: z.enum(['and', 'or']),
    children: z.array(filterNodeSchema),
  }),
);
|
||||
|
||||
// Count nodes + max depth to prevent pathological trees from reaching SQL.
|
||||
export function validateFilterTree(node: FilterNode): void {
|
||||
let nodes = 0;
|
||||
const walk = (n: FilterNode, depth: number) => {
|
||||
if (depth > MAX_FILTER_DEPTH) {
|
||||
throw new Error(`Filter tree exceeds max depth ${MAX_FILTER_DEPTH}`);
|
||||
}
|
||||
nodes += 1;
|
||||
if (nodes > MAX_FILTER_NODES) {
|
||||
throw new Error(`Filter tree exceeds max node count ${MAX_FILTER_NODES}`);
|
||||
}
|
||||
if ('children' in n) {
|
||||
for (const c of n.children) walk(c, depth + 1);
|
||||
}
|
||||
};
|
||||
walk(node, 0);
|
||||
}
|
||||
|
||||
// One sort instruction; property resolution/kind checks happen in
// buildSorts, which silently skips unknown properties.
export const sortSpecSchema = z.object({
  propertyId: uuid,
  direction: z.enum(['asc', 'desc']),
});

export type SortSpec = z.infer<typeof sortSpecSchema>;

export const sortsSchema = z.array(sortSpecSchema).max(MAX_SORTS);

// Free-text search request. 'trgm' (ILIKE over search_text) is the
// default; 'fts' uses the tsvector column.
export const searchSchema = z.object({
  query: z.string().min(1).max(500),
  mode: z.enum(['trgm', 'fts']).default('trgm'),
});

export type SearchSpec = z.infer<typeof searchSchema>;

// Top-level request DTO shape. The row controller DTO composes this.
export const listQuerySchema = z.object({
  filter: filterGroupSchema.optional(),
  sorts: sortsSchema.optional(),
  search: searchSchema.optional(),
});

export type ListQuery = z.infer<typeof listQuerySchema>;
|
||||
@@ -0,0 +1,27 @@
|
||||
import { Expression, ExpressionBuilder, sql, SqlBool } from 'kysely';
|
||||
import { DB } from '@docmost/db/types/db';
|
||||
import { SearchSpec } from './schema.zod';
|
||||
|
||||
type Eb = ExpressionBuilder<DB, 'baseRows'>;
|
||||
|
||||
/*
|
||||
* `search_text` and `search_tsv` are maintained by the base_rows search
|
||||
* trigger installed in the bases-hardening migration. Both columns are
|
||||
* indexed — pg_trgm GIN for ILIKE and standard GIN for tsvector.
|
||||
*/
|
||||
|
||||
/**
 * Compile a search spec into a predicate over the trigger-maintained
 * search columns on base_rows.
 *
 * A whitespace-only query compiles to TRUE (no filtering) rather than
 * erroring. The query text is always parameter-bound.
 */
export function buildSearch(eb: Eb, spec: SearchSpec): Expression<SqlBool> {
  const q = spec.query.trim();
  if (!q) return sql<SqlBool>`TRUE`;

  if (spec.mode === 'fts') {
    // Accent-insensitive match via f_unaccent (same helper the search
    // trigger uses when populating search_tsv / search_text).
    // NOTE(review): tsquery config is hard-coded to 'english' — confirm
    // acceptable for non-English workspaces.
    return sql<SqlBool>`search_tsv @@ plainto_tsquery('english', f_unaccent(${q}))`;
  }

  // trigram ILIKE mode (default). escape %/_/\\ in user input so wildcards
  // can't be injected.
  const escaped = q.replace(/[%_\\]/g, '\\$&');
  return sql<SqlBool>`search_text ILIKE ${'%' + escaped + '%'}`;
}
|
||||
@@ -0,0 +1,112 @@
|
||||
import { RawBuilder, sql } from 'kysely';
|
||||
import { BaseProperty } from '@docmost/db/types/entity.types';
|
||||
import { SortSpec } from './schema.zod';
|
||||
import { PropertyKind, SYSTEM_COLUMN, propertyKind } from './kinds';
|
||||
import {
|
||||
boolCell,
|
||||
dateCell,
|
||||
numericCell,
|
||||
textCell,
|
||||
} from './extractors';
|
||||
import { PropertySchema } from './predicate';
|
||||
|
||||
/*
|
||||
* Builds sort expressions with sentinel wrapping so NULLs compare
|
||||
* deterministically at the end of the sort order. This avoids the
|
||||
* `__null__` string sentinel bug in the old cursor encoder: because the
|
||||
* sort expression never returns NULL, the cursor simply stores the
|
||||
* extracted value and keyset comparisons work natively.
|
||||
*/
|
||||
|
||||
// One compiled sort: the SELECT-able expression, its cursor alias, and
// metadata the cursor encoder needs to round-trip the value.
export type SortBuild = {
  key: string; // alias used in cursor (s0, s1, ...)
  expression: RawBuilder<any>; // COALESCE-wrapped expression with sentinel
  direction: 'asc' | 'desc';
  valueType: 'numeric' | 'date' | 'text' | 'bool';
};

// Mandatory trailing tiebreaker columns appended to every sort so keyset
// pagination is total-ordered even when user sorts collide.
export type TailKey = 'position' | 'id';

export const CURSOR_TAIL_KEYS: TailKey[] = ['position', 'id'];
|
||||
|
||||
/**
 * Compile user sort specs into SELECT-able, NULL-free sort expressions.
 *
 * Unknown property ids and unmappable types are silently skipped (the
 * sort degrades rather than erroring); cursor keys (s0, s1, ...) are
 * indexed by the ORIGINAL spec position, so skips leave gaps — the
 * cursor stays consistent as long as the same schema is used to rebuild.
 */
export function buildSorts(
  sorts: SortSpec[],
  schema: PropertySchema,
): SortBuild[] {
  const out: SortBuild[] = [];
  for (let i = 0; i < sorts.length; i++) {
    const s = sorts[i];
    const prop = schema.get(s.propertyId);
    if (!prop) continue;

    const key = `s${i}`;
    const dir = s.direction;

    // System properties sort on the real column; no sentinel wrapping
    // (createdAt/updatedAt are NOT NULL; lastUpdatedById nullability is
    // left to native NULL ordering).
    const sysCol = SYSTEM_COLUMN[prop.type];
    if (sysCol) {
      out.push({
        key,
        expression: sql`${sql.ref(sysCol)}`,
        direction: dir,
        // NOTE(review): literal 'lastEditedBy' comparison — assumes it
        // matches BasePropertyType.LAST_EDITED_BY's string value.
        valueType: prop.type === 'lastEditedBy' ? 'text' : 'date',
      });
      continue;
    }

    const kind = propertyKind(prop.type);
    if (!kind) continue;

    out.push(wrapWithSentinel(s.propertyId, kind, dir, key));
  }
  return out;
}

/**
 * Wrap a cell extractor in COALESCE with a direction-dependent sentinel
 * so NULL cells always sort LAST regardless of direction, and the
 * expression itself is never NULL (which keeps cursor keyset comparisons
 * simple — no NULL-sentinel encoding needed).
 */
function wrapWithSentinel(
  propertyId: string,
  kind: Exclude<ReturnType<typeof propertyKind>, null>,
  direction: 'asc' | 'desc',
  key: string,
): SortBuild {
  if (kind === PropertyKind.NUMERIC) {
    // ASC: NULL → +Infinity (last); DESC: NULL → -Infinity (last).
    const sentinel =
      direction === 'asc'
        ? sql`'Infinity'::numeric`
        : sql`'-Infinity'::numeric`;
    return {
      key,
      expression: sql`COALESCE(${numericCell(propertyId)}, ${sentinel})`,
      direction,
      valueType: 'numeric',
    };
  }
  if (kind === PropertyKind.DATE) {
    // timestamptz supports ±infinity natively.
    const sentinel =
      direction === 'asc'
        ? sql`'infinity'::timestamptz`
        : sql`'-infinity'::timestamptz`;
    return {
      key,
      expression: sql`COALESCE(${dateCell(propertyId)}, ${sentinel})`,
      direction,
      valueType: 'date',
    };
  }
  if (kind === PropertyKind.BOOL) {
    // false < true. ASC NULLS LAST => null → true; DESC NULLS LAST => null → false.
    const sentinel = direction === 'asc' ? sql`TRUE` : sql`FALSE`;
    return {
      key,
      expression: sql`COALESCE(${boolCell(propertyId)}, ${sentinel})`,
      direction,
      valueType: 'bool',
    };
  }
  // TEXT / SELECT / MULTI / PERSON / FILE — sort by raw extracted text.
  // chr(1114111) is U+10FFFF, the max code point, so NULL collates after
  // any real string on ASC; '' collates first, so NULL lands last on DESC.
  const sentinel = direction === 'asc' ? sql`chr(1114111)` : sql`''`;
  return {
    key,
    expression: sql`COALESCE(${textCell(propertyId)}, ${sentinel})`,
    direction,
    valueType: 'text',
  };
}
|
||||
@@ -0,0 +1,47 @@
|
||||
import { BaseProperty, BaseRow, BaseView } from '@docmost/db/types/entity.types';
|
||||
|
||||
/*
|
||||
* Domain event payloads emitted by the base services after each mutation
|
||||
* commits. `base-ws-consumers.ts` picks these up and fans them out onto
|
||||
* the appropriate socket.io room. `requestId` lets the originating client
|
||||
* skip replaying its own echo.
|
||||
*/
|
||||
|
||||
// Common envelope on every base domain event. `actorId` is the user who
// triggered the mutation (null for system jobs); `requestId` lets the
// originating client skip replaying its own echo.
type BaseEventBase = {
  baseId: string;
  workspaceId: string;
  actorId?: string | null;
  requestId?: string | null;
};

// Row lifecycle events. Created carries the full row; updated carries
// only the patch plus the resolved cell values so clients can merge.
export type BaseRowCreatedEvent = BaseEventBase & { row: BaseRow };
export type BaseRowUpdatedEvent = BaseEventBase & {
  rowId: string;
  patch: Record<string, unknown>;
  updatedCells: Record<string, unknown>;
};
export type BaseRowDeletedEvent = BaseEventBase & { rowId: string };
export type BaseRowRestoredEvent = BaseEventBase & { rowId: string };
export type BaseRowReorderedEvent = BaseEventBase & {
  rowId: string;
  // New fractional/ordering key, serialised as a string.
  position: string;
};

// Property (column) lifecycle events. Updated includes the base's
// schemaVersion so clients can detect stale schema caches.
export type BasePropertyCreatedEvent = BaseEventBase & {
  property: BaseProperty;
};
export type BasePropertyUpdatedEvent = BaseEventBase & {
  property: BaseProperty;
  schemaVersion: number;
};
export type BasePropertyDeletedEvent = BaseEventBase & { propertyId: string };
export type BasePropertyReorderedEvent = BaseEventBase & {
  propertyId: string;
  position: string;
};

// View lifecycle events.
export type BaseViewCreatedEvent = BaseEventBase & { view: BaseView };
export type BaseViewUpdatedEvent = BaseEventBase & { view: BaseView };
export type BaseViewDeletedEvent = BaseEventBase & { viewId: string };

// Emitted whenever the base's schema version advances (type conversion,
// cell GC) — a signal for clients to invalidate row caches.
export type BaseSchemaBumpedEvent = BaseEventBase & { schemaVersion: number };
|
||||
@@ -0,0 +1,177 @@
|
||||
import { Logger, OnModuleDestroy } from '@nestjs/common';
|
||||
import { OnWorkerEvent, Processor, WorkerHost } from '@nestjs/bullmq';
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
import { Job } from 'bullmq';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { executeTx } from '@docmost/db/utils';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
|
||||
import {
|
||||
IBaseCellGcJob,
|
||||
IBaseTypeConversionJob,
|
||||
} from '../../../integrations/queue/constants/queue.interface';
|
||||
import { processBaseTypeConversion } from '../tasks/base-type-conversion.task';
|
||||
import { processBaseCellGc } from '../tasks/base-cell-gc.task';
|
||||
import { EventName } from '../../../common/events/event.contants';
|
||||
import {
|
||||
BasePropertyUpdatedEvent,
|
||||
BaseSchemaBumpedEvent,
|
||||
} from '../events/base-events';
|
||||
|
||||
@Processor(QueueName.BASE_QUEUE)
|
||||
export class BaseQueueProcessor
|
||||
extends WorkerHost
|
||||
implements OnModuleDestroy
|
||||
{
|
||||
private readonly logger = new Logger(BaseQueueProcessor.name);
|
||||
|
||||
constructor(
|
||||
@InjectKysely() private readonly db: KyselyDB,
|
||||
private readonly baseRowRepo: BaseRowRepo,
|
||||
private readonly basePropertyRepo: BasePropertyRepo,
|
||||
private readonly baseRepo: BaseRepo,
|
||||
private readonly eventEmitter: EventEmitter2,
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
async process(job: Job): Promise<unknown> {
|
||||
switch (job.name) {
|
||||
case QueueJob.BASE_TYPE_CONVERSION: {
|
||||
const data = job.data as IBaseTypeConversionJob;
|
||||
// Cell rewrite + pending→live swap + schema_version bump share one
|
||||
// transaction so readers never see cells already in the new format
|
||||
// under a still-pending type (or vice versa).
|
||||
const { summary, schemaVersion } = await executeTx(
|
||||
this.db,
|
||||
async (trx) => {
|
||||
const s = await processBaseTypeConversion(
|
||||
this.db,
|
||||
this.baseRowRepo,
|
||||
data,
|
||||
{
|
||||
trx,
|
||||
progress: (processed) => job.updateProgress({ processed }),
|
||||
},
|
||||
);
|
||||
await this.basePropertyRepo.commitPendingTypeChange(
|
||||
data.propertyId,
|
||||
trx,
|
||||
);
|
||||
await this.basePropertyRepo.bumpSchemaVersion(data.propertyId, trx);
|
||||
const v = await this.baseRepo.bumpSchemaVersion(data.baseId, trx);
|
||||
return { summary: s, schemaVersion: v };
|
||||
},
|
||||
);
|
||||
|
||||
// Emit the property:updated first so clients drop the "Converting…"
|
||||
// badge and repaint headers with the new type, then schema:bumped
|
||||
// so they invalidate row caches to pick up migrated cells.
|
||||
const updated = await this.basePropertyRepo.findById(data.propertyId);
|
||||
if (updated) {
|
||||
const event: BasePropertyUpdatedEvent = {
|
||||
baseId: data.baseId,
|
||||
workspaceId: data.workspaceId,
|
||||
actorId: data.actorId ?? null,
|
||||
requestId: null,
|
||||
property: updated,
|
||||
schemaVersion: updated.schemaVersion,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
|
||||
}
|
||||
this.emitSchemaBumped(
|
||||
data.baseId,
|
||||
data.workspaceId,
|
||||
schemaVersion,
|
||||
data.actorId,
|
||||
);
|
||||
return summary;
|
||||
}
|
||||
case QueueJob.BASE_CELL_GC: {
|
||||
const data = job.data as IBaseCellGcJob;
|
||||
await processBaseCellGc(
|
||||
this.db,
|
||||
this.baseRowRepo,
|
||||
this.basePropertyRepo,
|
||||
data,
|
||||
);
|
||||
const schemaVersion = await this.baseRepo.bumpSchemaVersion(
|
||||
data.baseId,
|
||||
);
|
||||
this.emitSchemaBumped(data.baseId, data.workspaceId, schemaVersion);
|
||||
return;
|
||||
}
|
||||
default:
|
||||
this.logger.warn(`Unknown job: ${job.name}`);
|
||||
}
|
||||
}
|
||||
|
||||
private emitSchemaBumped(
|
||||
baseId: string,
|
||||
workspaceId: string,
|
||||
schemaVersion: number,
|
||||
actorId?: string,
|
||||
): void {
|
||||
const event: BaseSchemaBumpedEvent = {
|
||||
baseId,
|
||||
workspaceId,
|
||||
actorId: actorId ?? null,
|
||||
requestId: null,
|
||||
schemaVersion,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_SCHEMA_BUMPED, event);
|
||||
}
|
||||
|
||||
@OnWorkerEvent('active')
|
||||
onActive(job: Job) {
|
||||
this.logger.debug(`Processing ${job.name} job ${job.id}`);
|
||||
}
|
||||
|
||||
/**
 * Failure hook. Logs every failed job; for type-conversion jobs it also
 * clears the staged pending type so the column doesn't wedge in
 * "Converting…" forever. Cells remain under the original type because
 * the rewrite transaction rolled back.
 */
@OnWorkerEvent('failed')
async onError(job: Job) {
  this.logger.error(
    `Error processing ${job.name} job ${job.id}. Reason: ${job.failedReason}`,
  );

  // Only conversion jobs leave staged state behind that needs cleanup.
  if (job.name !== QueueJob.BASE_TYPE_CONVERSION) {
    return;
  }

  const data = job.data as IBaseTypeConversionJob;
  try {
    await this.basePropertyRepo.clearPendingTypeChange(data.propertyId);
    const property = await this.basePropertyRepo.findById(data.propertyId);
    if (!property) {
      return;
    }
    // Re-announce the (unchanged) property so clients drop their
    // "Converting…" badge.
    const event: BasePropertyUpdatedEvent = {
      baseId: data.baseId,
      workspaceId: data.workspaceId,
      actorId: data.actorId ?? null,
      requestId: null,
      property,
      schemaVersion: property.schemaVersion,
    };
    this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
  } catch (cleanupErr) {
    this.logger.error(
      `Failed to clear pending type change on property ${data.propertyId}`,
      cleanupErr as Error,
    );
  }
}
|
||||
|
||||
/** Debug-level trace emitted when a job finishes successfully. */
@OnWorkerEvent('completed')
onCompleted(job: Job) {
  const { name, id } = job;
  this.logger.debug(`Completed ${name} job ${id}`);
}
|
||||
|
||||
async onModuleDestroy(): Promise<void> {
|
||||
if (this.worker) {
|
||||
await this.worker.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,74 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { RedisService } from '@nestjs-labs/nestjs-ioredis';
|
||||
import type { Redis } from 'ioredis';
|
||||
|
||||
// Redis key prefix; the full key is `presence:base:{baseId}`.
const PRESENCE_KEY_PREFIX = 'presence:base:';
// Entries older than this (ms) are treated as stale and filtered on read.
const PRESENCE_ENTRY_TTL_MS = 10_000;
// Redis EXPIRE (seconds) refreshed on every write so idle rooms self-delete.
const PRESENCE_KEY_TTL_S = 60;

// One user's live presence inside a base, JSON-serialised into the hash.
export type PresenceEntry = {
  userId: string;
  // Cell the user currently has focused, if any.
  cellId?: string | null;
  // Opaque client-supplied selection payload; not interpreted here.
  selection?: unknown;
  // Epoch ms when the entry was written; drives staleness filtering.
  ts: number;
};
|
||||
|
||||
/*
|
||||
* Ephemeral per-base presence. No DB. `presence:base:{baseId}` is a Redis
|
||||
* HASH keyed by userId with a JSON-serialised entry. Entries older than
|
||||
* PRESENCE_ENTRY_TTL_MS are filtered on read; the key itself is refreshed
|
||||
* with a longer Redis EXPIRE on every write so unused rooms drain on
|
||||
* their own.
|
||||
*/
|
||||
@Injectable()
|
||||
export class BasePresenceService {
|
||||
private readonly logger = new Logger(BasePresenceService.name);
|
||||
private readonly redis: Redis;
|
||||
|
||||
constructor(private readonly redisService: RedisService) {
|
||||
this.redis = this.redisService.getOrThrow();
|
||||
}
|
||||
|
||||
async setPresence(
|
||||
baseId: string,
|
||||
entry: PresenceEntry,
|
||||
): Promise<void> {
|
||||
const key = PRESENCE_KEY_PREFIX + baseId;
|
||||
await this.redis
|
||||
.multi()
|
||||
.hset(key, entry.userId, JSON.stringify(entry))
|
||||
.expire(key, PRESENCE_KEY_TTL_S)
|
||||
.exec();
|
||||
}
|
||||
|
||||
async leave(baseId: string, userId: string): Promise<void> {
|
||||
const key = PRESENCE_KEY_PREFIX + baseId;
|
||||
await this.redis.hdel(key, userId);
|
||||
}
|
||||
|
||||
async snapshot(baseId: string): Promise<PresenceEntry[]> {
|
||||
const key = PRESENCE_KEY_PREFIX + baseId;
|
||||
const raw = await this.redis.hgetall(key);
|
||||
const now = Date.now();
|
||||
const out: PresenceEntry[] = [];
|
||||
const stale: string[] = [];
|
||||
for (const [field, value] of Object.entries(raw)) {
|
||||
try {
|
||||
const entry = JSON.parse(value) as PresenceEntry;
|
||||
if (now - entry.ts <= PRESENCE_ENTRY_TTL_MS) {
|
||||
out.push(entry);
|
||||
} else {
|
||||
stale.push(field);
|
||||
}
|
||||
} catch {
|
||||
stale.push(field);
|
||||
}
|
||||
}
|
||||
// Opportunistic GC so the hash doesn't accumulate during long-lived
|
||||
// rooms where the key TTL keeps getting refreshed by active users.
|
||||
if (stale.length > 0) {
|
||||
this.redis.hdel(key, ...stale).catch(() => {});
|
||||
}
|
||||
return out;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,165 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { EventName } from '../../../common/events/event.contants';
|
||||
import { BaseWsService } from './base-ws.service';
|
||||
import {
|
||||
BasePropertyCreatedEvent,
|
||||
BasePropertyDeletedEvent,
|
||||
BasePropertyReorderedEvent,
|
||||
BasePropertyUpdatedEvent,
|
||||
BaseRowCreatedEvent,
|
||||
BaseRowDeletedEvent,
|
||||
BaseRowReorderedEvent,
|
||||
BaseRowUpdatedEvent,
|
||||
BaseSchemaBumpedEvent,
|
||||
BaseViewCreatedEvent,
|
||||
BaseViewDeletedEvent,
|
||||
BaseViewUpdatedEvent,
|
||||
} from '../events/base-events';
|
||||
|
||||
/*
|
||||
* In-process listeners that forward base domain events onto the
|
||||
* `base-{baseId}` socket.io room. Originating clients suppress their own
|
||||
* echoes via `requestId`.
|
||||
*/
|
||||
@Injectable()
|
||||
export class BaseWsConsumers {
|
||||
private readonly logger = new Logger(BaseWsConsumers.name);
|
||||
|
||||
constructor(private readonly ws: BaseWsService) {}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_CREATED)
|
||||
onRowCreated(e: BaseRowCreatedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:row:created',
|
||||
baseId: e.baseId,
|
||||
row: e.row,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_UPDATED)
|
||||
onRowUpdated(e: BaseRowUpdatedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:row:updated',
|
||||
baseId: e.baseId,
|
||||
rowId: e.rowId,
|
||||
patch: e.patch,
|
||||
updatedCells: e.updatedCells,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_DELETED)
|
||||
onRowDeleted(e: BaseRowDeletedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:row:deleted',
|
||||
baseId: e.baseId,
|
||||
rowId: e.rowId,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_REORDERED)
|
||||
onRowReordered(e: BaseRowReorderedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:row:reordered',
|
||||
baseId: e.baseId,
|
||||
rowId: e.rowId,
|
||||
position: e.position,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_CREATED)
|
||||
onPropertyCreated(e: BasePropertyCreatedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:property:created',
|
||||
baseId: e.baseId,
|
||||
property: e.property,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_UPDATED)
|
||||
onPropertyUpdated(e: BasePropertyUpdatedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:property:updated',
|
||||
baseId: e.baseId,
|
||||
property: e.property,
|
||||
schemaVersion: e.schemaVersion,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_DELETED)
|
||||
onPropertyDeleted(e: BasePropertyDeletedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:property:deleted',
|
||||
baseId: e.baseId,
|
||||
propertyId: e.propertyId,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_REORDERED)
|
||||
onPropertyReordered(e: BasePropertyReorderedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:property:reordered',
|
||||
baseId: e.baseId,
|
||||
propertyId: e.propertyId,
|
||||
position: e.position,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_VIEW_CREATED)
|
||||
onViewCreated(e: BaseViewCreatedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:view:created',
|
||||
baseId: e.baseId,
|
||||
view: e.view,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_VIEW_UPDATED)
|
||||
onViewUpdated(e: BaseViewUpdatedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:view:updated',
|
||||
baseId: e.baseId,
|
||||
view: e.view,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_VIEW_DELETED)
|
||||
onViewDeleted(e: BaseViewDeletedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:view:deleted',
|
||||
baseId: e.baseId,
|
||||
viewId: e.viewId,
|
||||
actorId: e.actorId ?? null,
|
||||
requestId: e.requestId ?? null,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_SCHEMA_BUMPED)
|
||||
onSchemaBumped(e: BaseSchemaBumpedEvent) {
|
||||
this.ws.emitToBase(e.baseId, {
|
||||
operation: 'base:schema:bumped',
|
||||
baseId: e.baseId,
|
||||
schemaVersion: e.schemaVersion,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,233 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { z } from 'zod';
|
||||
import type { Server, Socket } from 'socket.io';
|
||||
import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { findHighestUserSpaceRole } from '@docmost/db/repos/space/utils';
|
||||
import { getBaseRoomName } from '../../../ws/ws.utils';
|
||||
import { BasePresenceService, PresenceEntry } from './base-presence.service';
|
||||
|
||||
/*
|
||||
* Inbound shapes from untrusted socket clients. Zod-validated at the
|
||||
* boundary so malformed payloads (non-uuid baseId, missing fields,
|
||||
* oversized selection blobs) never reach the permission check or Redis.
|
||||
*/
|
||||
// Join a base room (permission-checked server-side before the join).
const baseSubscribeSchema = z.object({
  operation: z.literal('base:subscribe'),
  // NOTE(review): `z.uuid()` is the zod v4 top-level string format; zod v3
  // spells this `z.string().uuid()` — confirm the installed major version.
  baseId: z.uuid(),
});

// Leave a base room.
const baseUnsubscribeSchema = z.object({
  operation: z.literal('base:unsubscribe'),
  baseId: z.uuid(),
});

// Presence heartbeat: which cell the user is on plus an opaque selection.
const basePresenceSchema = z.object({
  operation: z.literal('base:presence'),
  baseId: z.uuid(),
  // Capped at 200 chars so clients can't smuggle oversized strings here.
  cellId: z.string().max(200).optional().nullable(),
  // Unvalidated passthrough; consumers treat it as opaque.
  selection: z.unknown().optional(),
});

// Explicit presence removal without leaving the room.
const basePresenceLeaveSchema = z.object({
  operation: z.literal('base:presence:leave'),
  baseId: z.uuid(),
});

// Discriminated by the `operation` literal on each member.
const inboundSchema = z.union([
  baseSubscribeSchema,
  baseUnsubscribeSchema,
  basePresenceSchema,
  basePresenceLeaveSchema,
]);

// Union of every validated inbound message shape.
type BaseInbound = z.infer<typeof inboundSchema>;

// Minimal contract for outbound room broadcasts.
type BaseOutbound = { operation: `base:${string}` } & Record<string, unknown>;
|
||||
|
||||
@Injectable()
|
||||
export class BaseWsService {
|
||||
private readonly logger = new Logger(BaseWsService.name);
|
||||
private server: Server | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly baseRepo: BaseRepo,
|
||||
private readonly spaceMemberRepo: SpaceMemberRepo,
|
||||
private readonly presence: BasePresenceService,
|
||||
) {}
|
||||
|
||||
setServer(server: Server): void {
|
||||
this.server = server;
|
||||
}
|
||||
|
||||
isBaseEvent(data: any): boolean {
|
||||
return (
|
||||
typeof data?.operation === 'string' && data.operation.startsWith('base:')
|
||||
);
|
||||
}
|
||||
|
||||
async handleInbound(client: Socket, raw: unknown): Promise<void> {
|
||||
const parsed = inboundSchema.safeParse(raw);
|
||||
if (!parsed.success) {
|
||||
this.logger.debug(
|
||||
`Rejecting inbound base event: ${parsed.error.issues[0]?.message}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
const data = parsed.data;
|
||||
switch (data.operation) {
|
||||
case 'base:subscribe':
|
||||
await this.subscribe(client, data.baseId);
|
||||
return;
|
||||
case 'base:unsubscribe':
|
||||
await this.unsubscribe(client, data.baseId);
|
||||
return;
|
||||
case 'base:presence':
|
||||
await this.handlePresence(client, data);
|
||||
return;
|
||||
case 'base:presence:leave':
|
||||
await this.handlePresenceLeave(client, data.baseId);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
emitToBase(baseId: string, payload: BaseOutbound): void {
|
||||
if (!this.server) return;
|
||||
this.server.to(getBaseRoomName(baseId)).emit('message', payload);
|
||||
}
|
||||
|
||||
/*
|
||||
* Called from WsGateway on client disconnect. Walks the per-socket
|
||||
* set of subscribed bases and cleans up presence without waiting for
|
||||
* entry TTLs to expire — keeps the snapshot fresh for others in the
|
||||
* room.
|
||||
*/
|
||||
async handleDisconnect(client: Socket): Promise<void> {
|
||||
const userId = client.data?.userId as string | undefined;
|
||||
const subs = this.subscriptionsFor(client);
|
||||
if (!userId || subs.size === 0) return;
|
||||
for (const baseId of subs) {
|
||||
await this.presence.leave(baseId, userId);
|
||||
this.emitToBase(baseId, {
|
||||
operation: 'base:presence:leave',
|
||||
baseId,
|
||||
userId,
|
||||
});
|
||||
}
|
||||
subs.clear();
|
||||
}
|
||||
|
||||
// --- private -------------------------------------------------------
|
||||
|
||||
private async subscribe(client: Socket, baseId: string): Promise<void> {
|
||||
const userId = client.data?.userId as string | undefined;
|
||||
if (!userId) {
|
||||
client.emit('message', {
|
||||
operation: 'base:subscribe:error',
|
||||
baseId,
|
||||
reason: 'unauthenticated',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const base = await this.baseRepo.findById(baseId);
|
||||
if (!base) {
|
||||
client.emit('message', {
|
||||
operation: 'base:subscribe:error',
|
||||
baseId,
|
||||
reason: 'not_found',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const canRead = await this.canReadBaseSpace(userId, base.spaceId);
|
||||
if (!canRead) {
|
||||
client.emit('message', {
|
||||
operation: 'base:subscribe:error',
|
||||
baseId,
|
||||
reason: 'forbidden',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
client.join(getBaseRoomName(baseId));
|
||||
this.subscriptionsFor(client).add(baseId);
|
||||
|
||||
// Send the current presence snapshot to just this client so their UI
|
||||
// can paint who's already editing what.
|
||||
const snapshot = await this.presence.snapshot(baseId);
|
||||
client.emit('message', {
|
||||
operation: 'base:presence:snapshot',
|
||||
baseId,
|
||||
entries: snapshot,
|
||||
});
|
||||
}
|
||||
|
||||
private async unsubscribe(client: Socket, baseId: string): Promise<void> {
|
||||
const userId = client.data?.userId as string | undefined;
|
||||
if (!userId) return;
|
||||
|
||||
client.leave(getBaseRoomName(baseId));
|
||||
this.subscriptionsFor(client).delete(baseId);
|
||||
|
||||
await this.presence.leave(baseId, userId);
|
||||
this.emitToBase(baseId, {
|
||||
operation: 'base:presence:leave',
|
||||
baseId,
|
||||
userId,
|
||||
});
|
||||
}
|
||||
|
||||
private async handlePresence(
|
||||
client: Socket,
|
||||
data: Extract<BaseInbound, { operation: 'base:presence' }>,
|
||||
): Promise<void> {
|
||||
const userId = client.data?.userId as string | undefined;
|
||||
if (!userId) return;
|
||||
if (!client.rooms.has(getBaseRoomName(data.baseId))) return;
|
||||
|
||||
const entry: PresenceEntry = {
|
||||
userId,
|
||||
cellId: data.cellId ?? null,
|
||||
selection: data.selection ?? null,
|
||||
ts: Date.now(),
|
||||
};
|
||||
await this.presence.setPresence(data.baseId, entry);
|
||||
|
||||
this.emitToBase(data.baseId, {
|
||||
operation: 'base:presence',
|
||||
baseId: data.baseId,
|
||||
...entry,
|
||||
});
|
||||
}
|
||||
|
||||
private async handlePresenceLeave(
|
||||
client: Socket,
|
||||
baseId: string,
|
||||
): Promise<void> {
|
||||
const userId = client.data?.userId as string | undefined;
|
||||
if (!userId) return;
|
||||
await this.presence.leave(baseId, userId);
|
||||
this.emitToBase(baseId, {
|
||||
operation: 'base:presence:leave',
|
||||
baseId,
|
||||
userId,
|
||||
});
|
||||
}
|
||||
|
||||
private async canReadBaseSpace(
|
||||
userId: string,
|
||||
spaceId: string,
|
||||
): Promise<boolean> {
|
||||
const roles = await this.spaceMemberRepo.getUserSpaceRoles(userId, spaceId);
|
||||
return !!findHighestUserSpaceRole(roles);
|
||||
}
|
||||
|
||||
private subscriptionsFor(client: Socket): Set<string> {
|
||||
const existing = client.data.baseSubscriptions as Set<string> | undefined;
|
||||
if (existing) return existing;
|
||||
const fresh = new Set<string>();
|
||||
client.data.baseSubscriptions = fresh;
|
||||
return fresh;
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,21 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
ConflictException,
|
||||
Injectable,
|
||||
Logger,
|
||||
NotFoundException,
|
||||
ServiceUnavailableException,
|
||||
} from '@nestjs/common';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { InjectQueue } from '@nestjs/bullmq';
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
import { Queue } from 'bullmq';
|
||||
import { sql, SqlBool } from 'kysely';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { executeTx } from '@docmost/db/utils';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { CreatePropertyDto } from '../dto/create-property.dto';
|
||||
import {
|
||||
UpdatePropertyDto,
|
||||
@@ -15,49 +23,135 @@ import {
|
||||
ReorderPropertyDto,
|
||||
} from '../dto/update-property.dto';
|
||||
import {
|
||||
BasePropertyType,
|
||||
BasePropertyTypeValue,
|
||||
parseTypeOptions,
|
||||
attemptCellConversion,
|
||||
validateTypeOptions,
|
||||
isSystemPropertyType,
|
||||
} from '../base.schemas';
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
|
||||
import {
|
||||
IBaseCellGcJob,
|
||||
IBaseTypeConversionJob,
|
||||
} from '../../../integrations/queue/constants/queue.interface';
|
||||
import { EventName } from '../../../common/events/event.contants';
|
||||
import {
|
||||
BasePropertyCreatedEvent,
|
||||
BasePropertyDeletedEvent,
|
||||
BasePropertyReorderedEvent,
|
||||
BasePropertyUpdatedEvent,
|
||||
BaseSchemaBumpedEvent,
|
||||
} from '../events/base-events';
|
||||
import { processBaseTypeConversion } from '../tasks/base-type-conversion.task';
|
||||
|
||||
/*
|
||||
* Types whose cell values are IDs referencing external records. Converting
|
||||
* them to any other type (especially text) requires an ID → display
|
||||
* resolution pass — otherwise `select → text` persists the choice UUID
|
||||
* instead of its display name (the bug that motivated this job).
|
||||
*/
|
||||
const ID_REFERENCING_TYPES: ReadonlySet<BasePropertyTypeValue> = new Set([
|
||||
BasePropertyType.SELECT,
|
||||
BasePropertyType.STATUS,
|
||||
BasePropertyType.MULTI_SELECT,
|
||||
BasePropertyType.PERSON,
|
||||
BasePropertyType.FILE,
|
||||
]);
|
||||
|
||||
/*
|
||||
* Row-count cutoff below which the cell rewrite runs synchronously inside
|
||||
* the HTTP request. Chosen so even worst-case (file → text with attachment
|
||||
* name joins) completes comfortably under the default 30s request timeout.
|
||||
* Larger bases fall back to the BullMQ worker path which flips the type
|
||||
* only after the rewrite completes, showing a "Converting…" header state
|
||||
* in the meantime.
|
||||
*/
|
||||
const INLINE_CONVERSION_ROW_LIMIT = 2000;
|
||||
|
||||
@Injectable()
|
||||
export class BasePropertyService {
|
||||
private readonly logger = new Logger(BasePropertyService.name);
|
||||
|
||||
constructor(
|
||||
@InjectKysely() private readonly db: KyselyDB,
|
||||
private readonly basePropertyRepo: BasePropertyRepo,
|
||||
private readonly baseRowRepo: BaseRowRepo,
|
||||
private readonly baseRepo: BaseRepo,
|
||||
@InjectQueue(QueueName.BASE_QUEUE) private readonly baseQueue: Queue,
|
||||
private readonly eventEmitter: EventEmitter2,
|
||||
) {}
|
||||
|
||||
async create(workspaceId: string, dto: CreatePropertyDto) {
|
||||
async create(workspaceId: string, dto: CreatePropertyDto, actorId?: string) {
|
||||
const type = dto.type as BasePropertyTypeValue;
|
||||
let validatedTypeOptions = null;
|
||||
|
||||
if (dto.typeOptions) {
|
||||
validatedTypeOptions = parseTypeOptions(type, dto.typeOptions);
|
||||
} else {
|
||||
validatedTypeOptions = parseTypeOptions(type, {});
|
||||
}
|
||||
const validatedTypeOptions = dto.typeOptions
|
||||
? parseTypeOptionsOrThrow(type, dto.typeOptions)
|
||||
: parseTypeOptionsOrThrow(type, {});
|
||||
|
||||
const lastPosition = await this.basePropertyRepo.getLastPosition(
|
||||
dto.baseId,
|
||||
);
|
||||
const position = generateJitteredKeyBetween(lastPosition, null);
|
||||
|
||||
return this.basePropertyRepo.insertProperty({
|
||||
baseId: dto.baseId,
|
||||
name: dto.name,
|
||||
type: dto.type,
|
||||
position,
|
||||
typeOptions: validatedTypeOptions as any,
|
||||
workspaceId,
|
||||
const created = await executeTx(this.db, async (trx) => {
|
||||
const row = await this.basePropertyRepo.insertProperty(
|
||||
{
|
||||
baseId: dto.baseId,
|
||||
name: dto.name,
|
||||
type: dto.type,
|
||||
position,
|
||||
typeOptions: validatedTypeOptions as any,
|
||||
workspaceId,
|
||||
},
|
||||
trx,
|
||||
);
|
||||
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
|
||||
return row;
|
||||
});
|
||||
|
||||
const event: BasePropertyCreatedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: actorId ?? null,
|
||||
requestId: null,
|
||||
property: created,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_PROPERTY_CREATED, event);
|
||||
|
||||
return created;
|
||||
}
|
||||
|
||||
async update(dto: UpdatePropertyDto) {
|
||||
/*
|
||||
* Metadata update. Three paths:
|
||||
*
|
||||
* - Coercion-safe (number↔text, text↔url, etc.): flip the type/type
|
||||
* options in one transaction, bump schema_version, return. The engine
|
||||
* reads cells through schema-on-read extractors so no cell rewrite is
|
||||
* needed.
|
||||
* - ID-referencing or system-involving conversion with a small number
|
||||
* of rows: run the cell rewrite, flip the type, and bump
|
||||
* schema_version all in one transaction — the HTTP request waits but
|
||||
* nobody ever sees raw IDs under the new type.
|
||||
* - Same kind of conversion on a large base: stage the target type on
|
||||
* `pendingType` / `pendingTypeOptions`, keep the live `type` as-is,
|
||||
* enqueue the worker. Clients render under the old type (so cells
|
||||
* resolve to display names, not UUIDs) and show a "Converting…"
|
||||
* badge until the worker transaction commits and swaps the pending
|
||||
* pair onto `type`.
|
||||
*/
|
||||
async update(
|
||||
dto: UpdatePropertyDto,
|
||||
workspaceId: string,
|
||||
actorId?: string,
|
||||
) {
|
||||
const t0 = Date.now();
|
||||
const tick = (label: string) =>
|
||||
this.logger.log(
|
||||
`property-update ${dto.propertyId} ${label}=${Date.now() - t0}ms`,
|
||||
);
|
||||
|
||||
const property = await this.basePropertyRepo.findById(dto.propertyId);
|
||||
tick('after-findById');
|
||||
if (!property) {
|
||||
throw new NotFoundException('Property not found');
|
||||
}
|
||||
@@ -66,56 +160,218 @@ export class BasePropertyService {
|
||||
throw new BadRequestException('Property does not belong to this base');
|
||||
}
|
||||
|
||||
// Block concurrent type changes — the worker still owns the previous
|
||||
// conversion, and letting a second one through would race on `type`.
|
||||
if (property.pendingType) {
|
||||
throw new ConflictException(
|
||||
'A type conversion is already in progress for this property',
|
||||
);
|
||||
}
|
||||
|
||||
const isTypeChange = dto.type && dto.type !== property.type;
|
||||
const oldType = property.type as BasePropertyTypeValue;
|
||||
const oldTypeOptions = property.typeOptions;
|
||||
const newType = (dto.type ?? property.type) as BasePropertyTypeValue;
|
||||
|
||||
let validatedTypeOptions = property.typeOptions;
|
||||
if (dto.typeOptions !== undefined) {
|
||||
validatedTypeOptions = parseTypeOptions(newType, dto.typeOptions) as any;
|
||||
validatedTypeOptions = parseTypeOptionsOrThrow(
|
||||
newType,
|
||||
dto.typeOptions,
|
||||
) as any;
|
||||
} else if (isTypeChange) {
|
||||
const result = validateTypeOptions(newType, {});
|
||||
validatedTypeOptions = result.success ? (result.data as any) : null;
|
||||
}
|
||||
|
||||
let conversionSummary: {
|
||||
converted: number;
|
||||
cleared: number;
|
||||
total: number;
|
||||
} | null = null;
|
||||
const involvesSystem =
|
||||
isSystemPropertyType(oldType) || isSystemPropertyType(newType);
|
||||
const needsIdResolution = ID_REFERENCING_TYPES.has(oldType);
|
||||
const needsCellRewrite =
|
||||
isTypeChange && (involvesSystem || needsIdResolution);
|
||||
|
||||
if (isTypeChange) {
|
||||
const involvesSystem =
|
||||
isSystemPropertyType(property.type) || isSystemPropertyType(newType);
|
||||
|
||||
if (involvesSystem) {
|
||||
conversionSummary = await this.clearCellValues(
|
||||
dto.baseId,
|
||||
// --- Path 1: no cell rewrite needed ---------------------------------
|
||||
if (!needsCellRewrite) {
|
||||
await executeTx(this.db, async (trx) => {
|
||||
await this.basePropertyRepo.updateProperty(
|
||||
dto.propertyId,
|
||||
{
|
||||
...(dto.name !== undefined && { name: dto.name }),
|
||||
...(dto.type !== undefined && { type: dto.type }),
|
||||
typeOptions: validatedTypeOptions,
|
||||
},
|
||||
trx,
|
||||
);
|
||||
} else {
|
||||
conversionSummary = await this.convertCellValues(
|
||||
dto.baseId,
|
||||
dto.propertyId,
|
||||
property.type as BasePropertyTypeValue,
|
||||
newType,
|
||||
);
|
||||
}
|
||||
if (isTypeChange) {
|
||||
await this.basePropertyRepo.bumpSchemaVersion(dto.propertyId, trx);
|
||||
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
|
||||
}
|
||||
});
|
||||
return this.loadAndEmit(dto, workspaceId, actorId, null);
|
||||
}
|
||||
|
||||
await this.basePropertyRepo.updateProperty(dto.propertyId, {
|
||||
...(dto.name !== undefined && { name: dto.name }),
|
||||
...(dto.type !== undefined && { type: dto.type }),
|
||||
typeOptions: validatedTypeOptions,
|
||||
});
|
||||
// --- Path 2 or 3: cell rewrite needed -------------------------------
|
||||
const conversionPayload: IBaseTypeConversionJob = {
|
||||
baseId: dto.baseId,
|
||||
propertyId: dto.propertyId,
|
||||
workspaceId,
|
||||
fromType: oldType,
|
||||
toType: newType,
|
||||
fromTypeOptions: oldTypeOptions,
|
||||
toTypeOptions: validatedTypeOptions,
|
||||
clearMode: involvesSystem,
|
||||
actorId,
|
||||
};
|
||||
|
||||
const updatedProperty = await this.basePropertyRepo.findById(
|
||||
// Count only the rows whose cell jsonb has this property's key — the
|
||||
// set the worker will actually rewrite. A 100k-row base with the
|
||||
// property set on 12 rows is trivial to convert inline; the previous
|
||||
// count-all-live-rows check was routing those to the worker.
|
||||
const rowsToConvert = await this.countRowsToConvert(
|
||||
dto.baseId,
|
||||
workspaceId,
|
||||
dto.propertyId,
|
||||
);
|
||||
tick(`after-countRowsToConvert(${rowsToConvert})`);
|
||||
|
||||
return { property: updatedProperty, conversionSummary };
|
||||
if (rowsToConvert <= INLINE_CONVERSION_ROW_LIMIT) {
|
||||
tick('taking-inline-path');
|
||||
// Path 2: inline rewrite. Apply the name-only fields (if any), run
|
||||
// the rewrite, then flip the type — all in one transaction so
|
||||
// readers only ever see a consistent snapshot.
|
||||
const schemaVersion = await executeTx(this.db, async (trx) => {
|
||||
if (dto.name !== undefined) {
|
||||
await this.basePropertyRepo.updateProperty(
|
||||
dto.propertyId,
|
||||
{ name: dto.name },
|
||||
trx,
|
||||
);
|
||||
}
|
||||
await processBaseTypeConversion(
|
||||
this.db,
|
||||
this.baseRowRepo,
|
||||
conversionPayload,
|
||||
{ trx },
|
||||
);
|
||||
await this.basePropertyRepo.updateProperty(
|
||||
dto.propertyId,
|
||||
{
|
||||
type: newType,
|
||||
typeOptions: validatedTypeOptions,
|
||||
},
|
||||
trx,
|
||||
);
|
||||
await this.basePropertyRepo.bumpSchemaVersion(dto.propertyId, trx);
|
||||
return this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
|
||||
});
|
||||
tick('inline-tx-done');
|
||||
const bumpEvent: BaseSchemaBumpedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: actorId ?? null,
|
||||
requestId: null,
|
||||
schemaVersion,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_SCHEMA_BUMPED, bumpEvent);
|
||||
return this.loadAndEmit(dto, workspaceId, actorId, null);
|
||||
}
|
||||
|
||||
// Path 3: stage the new type on pending_*, keep live `type` alone,
|
||||
// and hand off to the worker. A best-effort revert clears the staging
|
||||
// fields if the enqueue itself fails.
|
||||
tick('taking-worker-path');
|
||||
await executeTx(this.db, async (trx) => {
|
||||
await this.basePropertyRepo.updateProperty(
|
||||
dto.propertyId,
|
||||
{
|
||||
...(dto.name !== undefined && { name: dto.name }),
|
||||
pendingType: newType,
|
||||
pendingTypeOptions: validatedTypeOptions,
|
||||
},
|
||||
trx,
|
||||
);
|
||||
});
|
||||
tick('after-set-pending');
|
||||
|
||||
let jobId: string | null = null;
|
||||
try {
|
||||
const job = await this.baseQueue.add(
|
||||
QueueJob.BASE_TYPE_CONVERSION,
|
||||
conversionPayload,
|
||||
{ attempts: 1 },
|
||||
);
|
||||
jobId = String(job.id);
|
||||
tick(`after-queue.add(${jobId})`);
|
||||
} catch (err) {
|
||||
this.logger.error(
|
||||
`Enqueue of type-conversion failed for property ${dto.propertyId}; clearing pending state`,
|
||||
err as Error,
|
||||
);
|
||||
try {
|
||||
await this.basePropertyRepo.clearPendingTypeChange(dto.propertyId);
|
||||
} catch (revertErr) {
|
||||
this.logger.error(
|
||||
`Failed to clear pending state on ${dto.propertyId}. Manual intervention required.`,
|
||||
revertErr as Error,
|
||||
);
|
||||
}
|
||||
throw new ServiceUnavailableException(
|
||||
'Type conversion queue unavailable. Property update rolled back.',
|
||||
);
|
||||
}
|
||||
|
||||
const out = await this.loadAndEmit(dto, workspaceId, actorId, jobId);
|
||||
tick('return');
|
||||
return out;
|
||||
}
|
||||
|
||||
async delete(dto: DeletePropertyDto) {
|
||||
/*
|
||||
* Reloads the property and emits `base.property.updated`. The emission
|
||||
* has to happen after the outer transaction commits so socket consumers
|
||||
* never race ahead of visibility.
|
||||
*/
|
||||
/*
 * Reloads the property and emits `base.property.updated`. The emission
 * has to happen after the outer transaction commits so socket consumers
 * never race ahead of visibility.
 */
private async loadAndEmit(
  dto: UpdatePropertyDto,
  workspaceId: string,
  actorId: string | undefined,
  jobId: string | null,
) {
  const property = await this.basePropertyRepo.findById(dto.propertyId);
  if (!property) {
    // Nothing to announce; still return the shape callers expect.
    return { property, jobId };
  }

  const event: BasePropertyUpdatedEvent = {
    baseId: dto.baseId,
    workspaceId,
    actorId: actorId ?? null,
    requestId: dto.requestId ?? null,
    property,
    schemaVersion: property.schemaVersion,
  };
  this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);

  return { property, jobId };
}
|
||||
|
||||
/*
 * Counts live (non-deleted) rows whose `cells` jsonb object contains a
 * key for this property — i.e. the exact row set a type conversion will
 * rewrite.
 */
private async countRowsToConvert(
  baseId: string,
  workspaceId: string,
  propertyId: string,
): Promise<number> {
  const row = await this.db
    .selectFrom('baseRows')
    .select(sql<string>`count(*)`.as('n'))
    .where('baseId', '=', baseId)
    .where('workspaceId', '=', workspaceId)
    .where('deletedAt', 'is', null)
    // Postgres jsonb `?` = "object has key"; propertyId goes through the
    // sql template so it is bound, not interpolated. The count comes back
    // typed as string (bigint), hence the Number() below.
    .where(sql<SqlBool>`cells ? ${propertyId}`)
    .executeTakeFirst();
  return Number(row?.n ?? 0);
}
|
||||
|
||||
async delete(
|
||||
dto: DeletePropertyDto,
|
||||
workspaceId: string,
|
||||
actorId?: string,
|
||||
) {
|
||||
const property = await this.basePropertyRepo.findById(dto.propertyId);
|
||||
if (!property) {
|
||||
throw new NotFoundException('Property not found');
|
||||
@@ -129,13 +385,56 @@ export class BasePropertyService {
|
||||
throw new BadRequestException('Cannot delete the primary property');
|
||||
}
|
||||
|
||||
// Soft-delete so queries filter the property out immediately, then
|
||||
// enqueue cell-gc to scrub cell keys and hard-delete. If the enqueue
|
||||
// fails, revert the soft-delete so the property isn't orphaned.
|
||||
await executeTx(this.db, async (trx) => {
|
||||
await this.basePropertyRepo.deleteProperty(dto.propertyId, trx);
|
||||
await this.baseRowRepo.removeCellKey(dto.baseId, dto.propertyId, trx);
|
||||
await this.basePropertyRepo.softDelete(dto.propertyId, trx);
|
||||
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
|
||||
});
|
||||
|
||||
const payload: IBaseCellGcJob = {
|
||||
baseId: dto.baseId,
|
||||
propertyId: dto.propertyId,
|
||||
workspaceId,
|
||||
};
|
||||
try {
|
||||
await this.baseQueue.add(QueueJob.BASE_CELL_GC, payload, { attempts: 2 });
|
||||
} catch (err) {
|
||||
this.logger.error(
|
||||
`Enqueue of cell-gc failed for property ${dto.propertyId}; reverting soft-delete`,
|
||||
err as Error,
|
||||
);
|
||||
try {
|
||||
await this.basePropertyRepo.updateProperty(dto.propertyId, {
|
||||
deletedAt: null,
|
||||
});
|
||||
} catch (revertErr) {
|
||||
this.logger.error(
|
||||
`Revert failed for property ${dto.propertyId}. Manual intervention required.`,
|
||||
revertErr as Error,
|
||||
);
|
||||
}
|
||||
throw new ServiceUnavailableException(
|
||||
'Cell-GC queue unavailable. Property delete rolled back.',
|
||||
);
|
||||
}
|
||||
|
||||
const event: BasePropertyDeletedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: actorId ?? null,
|
||||
requestId: dto.requestId ?? null,
|
||||
propertyId: dto.propertyId,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_PROPERTY_DELETED, event);
|
||||
}
|
||||
|
||||
async reorder(dto: ReorderPropertyDto) {
|
||||
async reorder(
|
||||
dto: ReorderPropertyDto,
|
||||
workspaceId: string,
|
||||
actorId?: string,
|
||||
) {
|
||||
const property = await this.basePropertyRepo.findById(dto.propertyId);
|
||||
if (!property) {
|
||||
throw new NotFoundException('Property not found');
|
||||
@@ -148,69 +447,29 @@ export class BasePropertyService {
|
||||
await this.basePropertyRepo.updateProperty(dto.propertyId, {
|
||||
position: dto.position,
|
||||
});
|
||||
}
|
||||
|
||||
private async clearCellValues(
|
||||
baseId: string,
|
||||
propertyId: string,
|
||||
): Promise<{ converted: number; cleared: number; total: number }> {
|
||||
const rows = await this.baseRowRepo.findAllByBaseId(baseId);
|
||||
const updates: Array<{ id: string; cells: Record<string, unknown> }> = [];
|
||||
|
||||
for (const row of rows) {
|
||||
const cells = row.cells as Record<string, unknown>;
|
||||
if (propertyId in cells) {
|
||||
updates.push({ id: row.id, cells: { [propertyId]: null } });
|
||||
}
|
||||
}
|
||||
|
||||
if (updates.length > 0) {
|
||||
await executeTx(this.db, async (trx) => {
|
||||
await this.baseRowRepo.batchUpdateCells(updates, trx);
|
||||
});
|
||||
}
|
||||
|
||||
return { converted: 0, cleared: updates.length, total: updates.length };
|
||||
}
|
||||
|
||||
private async convertCellValues(
|
||||
baseId: string,
|
||||
propertyId: string,
|
||||
fromType: BasePropertyTypeValue,
|
||||
toType: BasePropertyTypeValue,
|
||||
): Promise<{ converted: number; cleared: number; total: number }> {
|
||||
const rows = await this.baseRowRepo.findAllByBaseId(baseId);
|
||||
let converted = 0;
|
||||
let cleared = 0;
|
||||
let total = 0;
|
||||
|
||||
const updates: Array<{ id: string; cells: Record<string, unknown> }> = [];
|
||||
|
||||
for (const row of rows) {
|
||||
const cells = row.cells as Record<string, unknown>;
|
||||
if (!(propertyId in cells)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
total++;
|
||||
const currentValue = cells[propertyId];
|
||||
const result = attemptCellConversion(fromType, toType, currentValue);
|
||||
|
||||
if (result.converted) {
|
||||
converted++;
|
||||
updates.push({ id: row.id, cells: { [propertyId]: result.value } });
|
||||
} else {
|
||||
cleared++;
|
||||
updates.push({ id: row.id, cells: { [propertyId]: null } });
|
||||
}
|
||||
}
|
||||
|
||||
if (updates.length > 0) {
|
||||
await executeTx(this.db, async (trx) => {
|
||||
await this.baseRowRepo.batchUpdateCells(updates, trx);
|
||||
});
|
||||
}
|
||||
|
||||
return { converted, cleared, total };
|
||||
const event: BasePropertyReorderedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: actorId ?? null,
|
||||
requestId: dto.requestId ?? null,
|
||||
propertyId: dto.propertyId,
|
||||
position: dto.position,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_PROPERTY_REORDERED, event);
|
||||
}
|
||||
}
|
||||
|
||||
function parseTypeOptionsOrThrow(
|
||||
type: BasePropertyTypeValue,
|
||||
typeOptions: unknown,
|
||||
): unknown {
|
||||
try {
|
||||
return parseTypeOptions(type, typeOptions);
|
||||
} catch (err) {
|
||||
throw new BadRequestException({
|
||||
message: 'Invalid typeOptions',
|
||||
issues: (err as any)?.issues ?? [],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
@@ -11,6 +12,7 @@ import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
|
||||
import { CreateRowDto } from '../dto/create-row.dto';
|
||||
import {
|
||||
UpdateRowDto,
|
||||
DeleteRowDto,
|
||||
ListRowsDto,
|
||||
ReorderRowDto,
|
||||
} from '../dto/update-row.dto';
|
||||
@@ -22,6 +24,21 @@ import {
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
|
||||
import { BaseProperty } from '@docmost/db/types/entity.types';
|
||||
import {
|
||||
FilterNode,
|
||||
PropertySchema,
|
||||
SearchSpec,
|
||||
filterGroupSchema,
|
||||
searchSchema,
|
||||
validateFilterTree,
|
||||
} from '../engine';
|
||||
import { EventName } from '../../../common/events/event.contants';
|
||||
import {
|
||||
BaseRowCreatedEvent,
|
||||
BaseRowDeletedEvent,
|
||||
BaseRowReorderedEvent,
|
||||
BaseRowUpdatedEvent,
|
||||
} from '../events/base-events';
|
||||
|
||||
@Injectable()
|
||||
export class BaseRowService {
|
||||
@@ -30,19 +47,24 @@ export class BaseRowService {
|
||||
private readonly baseRowRepo: BaseRowRepo,
|
||||
private readonly basePropertyRepo: BasePropertyRepo,
|
||||
private readonly baseViewRepo: BaseViewRepo,
|
||||
private readonly eventEmitter: EventEmitter2,
|
||||
) {}
|
||||
|
||||
async create(userId: string, workspaceId: string, dto: CreateRowDto) {
|
||||
let position: string;
|
||||
|
||||
if (dto.afterRowId) {
|
||||
const afterRow = await this.baseRowRepo.findById(dto.afterRowId);
|
||||
const afterRow = await this.baseRowRepo.findById(dto.afterRowId, {
|
||||
workspaceId,
|
||||
});
|
||||
if (!afterRow || afterRow.baseId !== dto.baseId) {
|
||||
throw new BadRequestException('Invalid afterRowId');
|
||||
}
|
||||
position = generateJitteredKeyBetween(afterRow.position, null);
|
||||
} else {
|
||||
const lastPosition = await this.baseRowRepo.getLastPosition(dto.baseId);
|
||||
const lastPosition = await this.baseRowRepo.getLastPosition(dto.baseId, {
|
||||
workspaceId,
|
||||
});
|
||||
position = generateJitteredKeyBetween(lastPosition, null);
|
||||
}
|
||||
|
||||
@@ -52,68 +74,117 @@ export class BaseRowService {
|
||||
validatedCells = this.validateCells(dto.cells, properties);
|
||||
}
|
||||
|
||||
return this.baseRowRepo.insertRow({
|
||||
const created = await this.baseRowRepo.insertRow({
|
||||
baseId: dto.baseId,
|
||||
cells: validatedCells as any,
|
||||
position,
|
||||
creatorId: userId,
|
||||
workspaceId,
|
||||
});
|
||||
|
||||
const event: BaseRowCreatedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId,
|
||||
requestId: dto.requestId ?? null,
|
||||
row: created,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_ROW_CREATED, event);
|
||||
|
||||
return created;
|
||||
}
|
||||
|
||||
async getRowInfo(rowId: string, baseId: string) {
|
||||
const row = await this.baseRowRepo.findById(rowId);
|
||||
async getRowInfo(rowId: string, baseId: string, workspaceId: string) {
|
||||
const row = await this.baseRowRepo.findById(rowId, { workspaceId });
|
||||
if (!row || row.baseId !== baseId) {
|
||||
throw new NotFoundException('Row not found');
|
||||
}
|
||||
return row;
|
||||
}
|
||||
|
||||
async update(dto: UpdateRowDto, userId?: string) {
|
||||
const row = await this.baseRowRepo.findById(dto.rowId);
|
||||
async update(dto: UpdateRowDto, workspaceId: string, userId?: string) {
|
||||
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
|
||||
const validatedCells = this.validateCells(dto.cells, properties);
|
||||
|
||||
const updated = await this.baseRowRepo.updateCells(
|
||||
dto.rowId,
|
||||
validatedCells,
|
||||
{
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId,
|
||||
},
|
||||
);
|
||||
|
||||
if (!updated) {
|
||||
throw new NotFoundException('Row not found');
|
||||
}
|
||||
|
||||
const event: BaseRowUpdatedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId ?? null,
|
||||
requestId: dto.requestId ?? null,
|
||||
rowId: dto.rowId,
|
||||
patch: dto.cells,
|
||||
updatedCells: validatedCells,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_ROW_UPDATED, event);
|
||||
|
||||
return updated;
|
||||
}
|
||||
|
||||
async delete(dto: DeleteRowDto, workspaceId: string, userId?: string) {
|
||||
const row = await this.baseRowRepo.findById(dto.rowId, { workspaceId });
|
||||
if (!row || row.baseId !== dto.baseId) {
|
||||
throw new NotFoundException('Row not found');
|
||||
}
|
||||
|
||||
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
|
||||
const validatedCells = this.validateCells(dto.cells, properties);
|
||||
await this.baseRowRepo.softDelete(dto.rowId, {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
});
|
||||
|
||||
await this.baseRowRepo.updateCells(dto.rowId, validatedCells, userId);
|
||||
|
||||
return this.baseRowRepo.findById(dto.rowId);
|
||||
const event: BaseRowDeletedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId ?? null,
|
||||
requestId: dto.requestId ?? null,
|
||||
rowId: dto.rowId,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_ROW_DELETED, event);
|
||||
}
|
||||
|
||||
async delete(rowId: string, baseId: string) {
|
||||
const row = await this.baseRowRepo.findById(rowId);
|
||||
if (!row || row.baseId !== baseId) {
|
||||
throw new NotFoundException('Row not found');
|
||||
}
|
||||
|
||||
await this.baseRowRepo.softDelete(rowId);
|
||||
}
|
||||
|
||||
async list(dto: ListRowsDto, pagination: PaginationOptions) {
|
||||
const hasFilters = dto.filters && dto.filters.length > 0;
|
||||
const hasSorts = dto.sorts && dto.sorts.length > 0;
|
||||
|
||||
if (!hasFilters && !hasSorts) {
|
||||
return this.baseRowRepo.findByBaseId(dto.baseId, pagination);
|
||||
}
|
||||
|
||||
async list(
|
||||
dto: ListRowsDto,
|
||||
pagination: PaginationOptions,
|
||||
workspaceId: string,
|
||||
) {
|
||||
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
|
||||
const propertyTypeMap = new Map(properties.map((p) => [p.id, p.type]));
|
||||
|
||||
return this.baseRowRepo.findByBaseIdFiltered(
|
||||
dto.baseId,
|
||||
dto.filters ?? [],
|
||||
dto.sorts ?? [],
|
||||
propertyTypeMap,
|
||||
pagination,
|
||||
const schema: PropertySchema = new Map(
|
||||
properties.map((p) => [p.id, p]),
|
||||
);
|
||||
|
||||
const filter = this.normaliseFilter(dto);
|
||||
const search = this.normaliseSearch(dto.search);
|
||||
const sorts = dto.sorts?.map((s) => ({
|
||||
propertyId: s.propertyId,
|
||||
direction: s.direction,
|
||||
}));
|
||||
|
||||
return this.baseRowRepo.list({
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
filter,
|
||||
sorts,
|
||||
search,
|
||||
schema,
|
||||
pagination,
|
||||
});
|
||||
}
|
||||
|
||||
async reorder(dto: ReorderRowDto) {
|
||||
const row = await this.baseRowRepo.findById(dto.rowId);
|
||||
async reorder(dto: ReorderRowDto, workspaceId: string, userId?: string) {
|
||||
const row = await this.baseRowRepo.findById(dto.rowId, { workspaceId });
|
||||
if (!row || row.baseId !== dto.baseId) {
|
||||
throw new NotFoundException('Row not found');
|
||||
}
|
||||
@@ -124,7 +195,52 @@ export class BaseRowService {
|
||||
throw new BadRequestException('Invalid position value');
|
||||
}
|
||||
|
||||
await this.baseRowRepo.updatePosition(dto.rowId, dto.position);
|
||||
await this.baseRowRepo.updatePosition(dto.rowId, dto.position, {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
});
|
||||
|
||||
const event: BaseRowReorderedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId ?? null,
|
||||
requestId: dto.requestId ?? null,
|
||||
rowId: dto.rowId,
|
||||
position: dto.position,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_ROW_REORDERED, event);
|
||||
}
|
||||
|
||||
// --- private helpers ------------------------------------------------
|
||||
|
||||
private normaliseFilter(dto: ListRowsDto): FilterNode | undefined {
|
||||
if (!dto.filter) return undefined;
|
||||
|
||||
const parsed = filterGroupSchema.safeParse(dto.filter);
|
||||
if (!parsed.success) {
|
||||
throw new BadRequestException({
|
||||
message: 'Invalid filter tree',
|
||||
issues: parsed.error.issues,
|
||||
});
|
||||
}
|
||||
try {
|
||||
validateFilterTree(parsed.data);
|
||||
} catch (err) {
|
||||
throw new BadRequestException((err as Error).message);
|
||||
}
|
||||
return parsed.data;
|
||||
}
|
||||
|
||||
private normaliseSearch(raw: unknown): SearchSpec | undefined {
|
||||
if (raw == null) return undefined;
|
||||
const parsed = searchSchema.safeParse(raw);
|
||||
if (!parsed.success) {
|
||||
throw new BadRequestException({
|
||||
message: 'Invalid search spec',
|
||||
issues: parsed.error.issues,
|
||||
});
|
||||
}
|
||||
return parsed.data;
|
||||
}
|
||||
|
||||
private validateCells(
|
||||
|
||||
@@ -3,15 +3,25 @@ import {
|
||||
Injectable,
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
import { EventEmitter2 } from '@nestjs/event-emitter';
|
||||
import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
|
||||
import { CreateViewDto } from '../dto/create-view.dto';
|
||||
import { UpdateViewDto, DeleteViewDto } from '../dto/update-view.dto';
|
||||
import { viewConfigSchema } from '../base.schemas';
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
import { EventName } from '../../../common/events/event.contants';
|
||||
import {
|
||||
BaseViewCreatedEvent,
|
||||
BaseViewDeletedEvent,
|
||||
BaseViewUpdatedEvent,
|
||||
} from '../events/base-events';
|
||||
|
||||
@Injectable()
|
||||
export class BaseViewService {
|
||||
constructor(private readonly baseViewRepo: BaseViewRepo) {}
|
||||
constructor(
|
||||
private readonly baseViewRepo: BaseViewRepo,
|
||||
private readonly eventEmitter: EventEmitter2,
|
||||
) {}
|
||||
|
||||
async create(userId: string, workspaceId: string, dto: CreateViewDto) {
|
||||
let validatedConfig = {};
|
||||
@@ -26,10 +36,12 @@ export class BaseViewService {
|
||||
validatedConfig = result.data;
|
||||
}
|
||||
|
||||
const lastPosition = await this.baseViewRepo.getLastPosition(dto.baseId);
|
||||
const lastPosition = await this.baseViewRepo.getLastPosition(dto.baseId, {
|
||||
workspaceId,
|
||||
});
|
||||
const position = generateJitteredKeyBetween(lastPosition, null);
|
||||
|
||||
return this.baseViewRepo.insertView({
|
||||
const created = await this.baseViewRepo.insertView({
|
||||
baseId: dto.baseId,
|
||||
name: dto.name,
|
||||
type: dto.type ?? 'table',
|
||||
@@ -38,10 +50,21 @@ export class BaseViewService {
|
||||
workspaceId,
|
||||
creatorId: userId,
|
||||
});
|
||||
|
||||
const event: BaseViewCreatedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId,
|
||||
requestId: null,
|
||||
view: created,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_VIEW_CREATED, event);
|
||||
|
||||
return created;
|
||||
}
|
||||
|
||||
async update(dto: UpdateViewDto) {
|
||||
const view = await this.baseViewRepo.findById(dto.viewId);
|
||||
async update(dto: UpdateViewDto, workspaceId: string, userId?: string) {
|
||||
const view = await this.baseViewRepo.findById(dto.viewId, { workspaceId });
|
||||
if (!view) {
|
||||
throw new NotFoundException('View not found');
|
||||
}
|
||||
@@ -62,17 +85,36 @@ export class BaseViewService {
|
||||
validatedConfig = result.data;
|
||||
}
|
||||
|
||||
await this.baseViewRepo.updateView(dto.viewId, {
|
||||
...(dto.name !== undefined && { name: dto.name }),
|
||||
...(dto.type !== undefined && { type: dto.type }),
|
||||
...(validatedConfig !== undefined && { config: validatedConfig as any }),
|
||||
await this.baseViewRepo.updateView(
|
||||
dto.viewId,
|
||||
{
|
||||
...(dto.name !== undefined && { name: dto.name }),
|
||||
...(dto.type !== undefined && { type: dto.type }),
|
||||
...(validatedConfig !== undefined && { config: validatedConfig as any }),
|
||||
},
|
||||
{ workspaceId },
|
||||
);
|
||||
|
||||
const updated = await this.baseViewRepo.findById(dto.viewId, {
|
||||
workspaceId,
|
||||
});
|
||||
|
||||
return this.baseViewRepo.findById(dto.viewId);
|
||||
if (updated) {
|
||||
const event: BaseViewUpdatedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId ?? null,
|
||||
requestId: null,
|
||||
view: updated,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_VIEW_UPDATED, event);
|
||||
}
|
||||
|
||||
return updated;
|
||||
}
|
||||
|
||||
async delete(dto: DeleteViewDto) {
|
||||
const view = await this.baseViewRepo.findById(dto.viewId);
|
||||
async delete(dto: DeleteViewDto, workspaceId: string, userId?: string) {
|
||||
const view = await this.baseViewRepo.findById(dto.viewId, { workspaceId });
|
||||
if (!view) {
|
||||
throw new NotFoundException('View not found');
|
||||
}
|
||||
@@ -81,15 +123,26 @@ export class BaseViewService {
|
||||
throw new BadRequestException('View does not belong to this base');
|
||||
}
|
||||
|
||||
const viewCount = await this.baseViewRepo.countByBaseId(dto.baseId);
|
||||
const viewCount = await this.baseViewRepo.countByBaseId(dto.baseId, {
|
||||
workspaceId,
|
||||
});
|
||||
if (viewCount <= 1) {
|
||||
throw new BadRequestException('Cannot delete the last view');
|
||||
}
|
||||
|
||||
await this.baseViewRepo.deleteView(dto.viewId);
|
||||
await this.baseViewRepo.deleteView(dto.viewId, { workspaceId });
|
||||
|
||||
const event: BaseViewDeletedEvent = {
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
actorId: userId ?? null,
|
||||
requestId: null,
|
||||
viewId: dto.viewId,
|
||||
};
|
||||
this.eventEmitter.emit(EventName.BASE_VIEW_DELETED, event);
|
||||
}
|
||||
|
||||
async listByBaseId(baseId: string) {
|
||||
return this.baseViewRepo.findByBaseId(baseId);
|
||||
async listByBaseId(baseId: string, workspaceId: string) {
|
||||
return this.baseViewRepo.findByBaseId(baseId, { workspaceId });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ export class BaseService {
|
||||
workspaceId,
|
||||
creatorId: userId,
|
||||
},
|
||||
trx,
|
||||
{ trx },
|
||||
);
|
||||
|
||||
return this.baseRepo.findById(base.id, {
|
||||
|
||||
@@ -0,0 +1,35 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { executeTx } from '@docmost/db/utils';
|
||||
import { IBaseCellGcJob } from '../../../integrations/queue/constants/queue.interface';
|
||||
|
||||
const logger = new Logger('BaseCellGcTask');
|
||||
|
||||
/*
|
||||
* Removes a soft-deleted property's key from every row in the base, then
|
||||
* hard-deletes the property record. Both operations run inside a single
|
||||
* transaction — without it, a failure between `removeCellKey` and
|
||||
* `hardDelete` leaves rows scrubbed while the property row lingers,
|
||||
* requiring manual cleanup. `removeCellKey` is a single
|
||||
* `UPDATE ... SET cells = cells - $propId` statement.
|
||||
*/
|
||||
export async function processBaseCellGc(
|
||||
db: KyselyDB,
|
||||
baseRowRepo: BaseRowRepo,
|
||||
basePropertyRepo: BasePropertyRepo,
|
||||
data: IBaseCellGcJob,
|
||||
): Promise<void> {
|
||||
const { baseId, propertyId, workspaceId } = data;
|
||||
|
||||
await executeTx(db, async (trx) => {
|
||||
await baseRowRepo.removeCellKey(baseId, propertyId, {
|
||||
workspaceId,
|
||||
trx,
|
||||
});
|
||||
await basePropertyRepo.hardDelete(propertyId, trx);
|
||||
});
|
||||
|
||||
logger.log(`cell-gc complete base=${baseId} prop=${propertyId}`);
|
||||
}
|
||||
@@ -0,0 +1,203 @@
|
||||
import { Logger } from '@nestjs/common';
|
||||
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
|
||||
import { dbOrTx } from '@docmost/db/utils';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import {
|
||||
BasePropertyType,
|
||||
BasePropertyTypeValue,
|
||||
CellConversionContext,
|
||||
attemptCellConversion,
|
||||
} from '../base.schemas';
|
||||
import { IBaseTypeConversionJob } from '../../../integrations/queue/constants/queue.interface';
|
||||
|
||||
const logger = new Logger('BaseTypeConversionTask');
|
||||
|
||||
const CHUNK_SIZE = 1000;
|
||||
|
||||
/*
|
||||
* Handles the cell-rewrite side of a property type change on a base.
|
||||
* Runs per-chunk batched UPDATEs so Node RAM stays flat regardless of row
|
||||
* count. When the source type stores IDs (select / multiSelect / person /
|
||||
* file), it resolves to display values before writing — fixing the
|
||||
* `String(optionId)` bug that the old synchronous path produced.
|
||||
*
|
||||
* The `trx` option lets callers run the whole rewrite inside an outer
|
||||
* transaction. That matters for the inline path in `BasePropertyService`,
|
||||
* where the cell rewrite + `type` swap + `schema_version` bump must land
|
||||
* atomically so readers never observe cells written for a type that hasn't
|
||||
* flipped yet.
|
||||
*/
|
||||
export async function processBaseTypeConversion(
|
||||
db: KyselyDB,
|
||||
baseRowRepo: BaseRowRepo,
|
||||
data: IBaseTypeConversionJob,
|
||||
opts?: {
|
||||
progress?: (processed: number) => Promise<void> | void;
|
||||
trx?: KyselyTransaction;
|
||||
},
|
||||
): Promise<{ converted: number; cleared: number; total: number }> {
|
||||
const {
|
||||
baseId,
|
||||
propertyId,
|
||||
workspaceId,
|
||||
fromType,
|
||||
toType,
|
||||
fromTypeOptions,
|
||||
clearMode,
|
||||
actorId,
|
||||
} = data;
|
||||
|
||||
const progress = opts?.progress;
|
||||
const trx = opts?.trx;
|
||||
const queryDb = dbOrTx(db, trx);
|
||||
|
||||
let total = 0;
|
||||
let converted = 0;
|
||||
let cleared = 0;
|
||||
|
||||
// Only rows whose cell jsonb actually has this property key need
|
||||
// rewriting — everything else is already consistent with the new type
|
||||
// (empty value → empty value). Skips the full-table scan on bases
|
||||
// where the property was only ever set on a few rows.
|
||||
for await (const chunk of baseRowRepo.streamByBaseId(baseId, {
|
||||
workspaceId,
|
||||
chunkSize: CHUNK_SIZE,
|
||||
trx,
|
||||
withCellKey: propertyId,
|
||||
})) {
|
||||
const ctx = await buildCtx(
|
||||
queryDb,
|
||||
chunk,
|
||||
propertyId,
|
||||
fromType,
|
||||
fromTypeOptions,
|
||||
);
|
||||
const updates: Array<{ id: string; patch: Record<string, unknown> }> = [];
|
||||
|
||||
for (const row of chunk) {
|
||||
const cells = (row.cells ?? {}) as Record<string, unknown>;
|
||||
if (!(propertyId in cells)) continue;
|
||||
total++;
|
||||
|
||||
if (clearMode) {
|
||||
updates.push({ id: row.id, patch: { [propertyId]: null } });
|
||||
cleared++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const result = attemptCellConversion(
|
||||
fromType as BasePropertyTypeValue,
|
||||
toType as BasePropertyTypeValue,
|
||||
cells[propertyId],
|
||||
ctx,
|
||||
);
|
||||
if (result.converted) {
|
||||
converted++;
|
||||
updates.push({
|
||||
id: row.id,
|
||||
patch: { [propertyId]: result.value ?? null },
|
||||
});
|
||||
} else {
|
||||
cleared++;
|
||||
updates.push({ id: row.id, patch: { [propertyId]: null } });
|
||||
}
|
||||
}
|
||||
|
||||
if (updates.length > 0) {
|
||||
await baseRowRepo.batchUpdateCells(updates, {
|
||||
baseId,
|
||||
workspaceId,
|
||||
actorId,
|
||||
trx,
|
||||
});
|
||||
}
|
||||
|
||||
if (progress) await progress(total);
|
||||
}
|
||||
|
||||
logger.log(
|
||||
`type-conversion ${fromType}→${toType} base=${baseId} prop=${propertyId} total=${total} converted=${converted} cleared=${cleared}`,
|
||||
);
|
||||
|
||||
return { converted, cleared, total };
|
||||
}
|
||||
|
||||
/*
|
||||
* Builds the resolution context for a chunk. For select/multiSelect the
|
||||
* choice map lives in the property's typeOptions (already in the job
|
||||
* payload). For person and file, we batch-query the IDs present in this
|
||||
* chunk.
|
||||
*/
|
||||
async function buildCtx(
|
||||
db: KyselyDB | KyselyTransaction,
|
||||
chunk: Array<{ cells: unknown }>,
|
||||
propertyId: string,
|
||||
fromType: string,
|
||||
fromTypeOptions: unknown,
|
||||
): Promise<CellConversionContext> {
|
||||
const ctx: CellConversionContext = { fromTypeOptions };
|
||||
|
||||
if (fromType === BasePropertyType.PERSON) {
|
||||
const ids = collectIds(chunk, propertyId);
|
||||
if (ids.size > 0) {
|
||||
const rows = await db
|
||||
.selectFrom('users')
|
||||
.select(['id', 'name', 'email'])
|
||||
.where('id', 'in', Array.from(ids))
|
||||
.execute();
|
||||
ctx.userNames = new Map(
|
||||
rows.map((u) => [u.id, u.name || u.email || '']),
|
||||
);
|
||||
}
|
||||
} else if (fromType === BasePropertyType.FILE) {
|
||||
const ids = collectFileIds(chunk, propertyId);
|
||||
if (ids.size > 0) {
|
||||
const rows = await db
|
||||
.selectFrom('attachments')
|
||||
.select(['id', 'fileName'])
|
||||
.where('id', 'in', Array.from(ids))
|
||||
.execute();
|
||||
ctx.attachmentNames = new Map(rows.map((a) => [a.id, a.fileName]));
|
||||
}
|
||||
}
|
||||
|
||||
return ctx;
|
||||
}
|
||||
|
||||
function collectIds(
|
||||
chunk: Array<{ cells: unknown }>,
|
||||
propertyId: string,
|
||||
): Set<string> {
|
||||
const out = new Set<string>();
|
||||
for (const row of chunk) {
|
||||
const v = (row.cells as any)?.[propertyId];
|
||||
if (v == null) continue;
|
||||
if (Array.isArray(v)) {
|
||||
for (const item of v) {
|
||||
if (typeof item === 'string' && item.length > 0) out.add(item);
|
||||
}
|
||||
} else if (typeof v === 'string' && v.length > 0) {
|
||||
out.add(v);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function collectFileIds(
|
||||
chunk: Array<{ cells: unknown }>,
|
||||
propertyId: string,
|
||||
): Set<string> {
|
||||
const out = new Set<string>();
|
||||
for (const row of chunk) {
|
||||
const v = (row.cells as any)?.[propertyId];
|
||||
if (!Array.isArray(v)) continue;
|
||||
for (const f of v) {
|
||||
if (typeof f === 'string' && f.length > 0) {
|
||||
out.add(f);
|
||||
} else if (f && typeof f === 'object' && typeof f.id === 'string') {
|
||||
out.add(f.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
@@ -0,0 +1,333 @@
|
||||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<any>): Promise<void> {
|
||||
// --- Columns -----------------------------------------------------------
|
||||
|
||||
await sql`
|
||||
ALTER TABLE base_rows
|
||||
ADD COLUMN IF NOT EXISTS search_text text,
|
||||
ADD COLUMN IF NOT EXISTS search_tsv tsvector
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
ALTER TABLE base_properties
|
||||
ADD COLUMN IF NOT EXISTS schema_version integer NOT NULL DEFAULT 1,
|
||||
ADD COLUMN IF NOT EXISTS deleted_at timestamptz
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
ALTER TABLE bases
|
||||
ADD COLUMN IF NOT EXISTS schema_version integer NOT NULL DEFAULT 1
|
||||
`.execute(db);
|
||||
|
||||
// --- Schema-on-read extractors ----------------------------------------
|
||||
// Coercion-safe: uncoercible values return NULL, never raise.
|
||||
// IMMUTABLE so the planner can inline them into expression indexes later.
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_text(cells jsonb, prop uuid)
|
||||
RETURNS text
|
||||
LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$ SELECT cells->>prop::text $$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_numeric(cells jsonb, prop uuid)
|
||||
RETURNS numeric
|
||||
LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN (cells->>prop::text)::numeric;
|
||||
EXCEPTION WHEN others THEN
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_timestamptz(cells jsonb, prop uuid)
|
||||
RETURNS timestamptz
|
||||
LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN (cells->>prop::text)::timestamptz;
|
||||
EXCEPTION WHEN others THEN
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_bool(cells jsonb, prop uuid)
|
||||
RETURNS boolean
|
||||
LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN (cells->>prop::text)::boolean;
|
||||
EXCEPTION WHEN others THEN
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_array(cells jsonb, prop uuid)
|
||||
RETURNS jsonb
|
||||
LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$ SELECT cells->prop::text $$
|
||||
`.execute(db);
|
||||
|
||||
// --- Surgical JSONB patch (vs. whole-blob `||`) -----------------------
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION jsonb_set_many(target jsonb, patches jsonb)
|
||||
RETURNS jsonb
|
||||
LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE
|
||||
AS $$
|
||||
DECLARE
|
||||
k text;
|
||||
v jsonb;
|
||||
result jsonb := coalesce(target, '{}'::jsonb);
|
||||
BEGIN
|
||||
IF patches IS NULL OR jsonb_typeof(patches) <> 'object' THEN
|
||||
RETURN result;
|
||||
END IF;
|
||||
FOR k, v IN SELECT * FROM jsonb_each(patches) LOOP
|
||||
IF v = 'null'::jsonb THEN
|
||||
result := result - k;
|
||||
ELSE
|
||||
result := jsonb_set(result, ARRAY[k], v, true);
|
||||
END IF;
|
||||
END LOOP;
|
||||
RETURN result;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
// --- Search text builder (select/multiSelect resolved to choice names) --
|
||||
// STABLE (not IMMUTABLE) because it reads base_properties.
|
||||
//
|
||||
// Transaction-scoped cache: the property list for a given base_id is
|
||||
// read once per transaction and cached via a local GUC. Bulk writes
|
||||
// (CSV import, batch cell update, trigger on N rows) share one lookup
|
||||
// instead of subquerying base_properties per row.
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION build_base_row_search_text(
|
||||
_cells jsonb,
|
||||
_base_id uuid
|
||||
) RETURNS text
|
||||
LANGUAGE plpgsql STABLE PARALLEL SAFE
|
||||
AS $$
|
||||
DECLARE
|
||||
_parts text[] := ARRAY[]::text[];
|
||||
_prop jsonb;
|
||||
_value text;
|
||||
_arr jsonb;
|
||||
_elem jsonb;
|
||||
_resolved text;
|
||||
_cache_key text;
|
||||
_cached text;
|
||||
_props jsonb;
|
||||
BEGIN
|
||||
IF _cells IS NULL OR _cells = '{}'::jsonb OR _base_id IS NULL THEN
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
|
||||
-- Transaction-scoped cache of the base's property list.
|
||||
_cache_key := 'bases.prop_cache_' || replace(_base_id::text, '-', '_');
|
||||
_cached := current_setting(_cache_key, true);
|
||||
IF _cached IS NULL OR _cached = '' THEN
|
||||
SELECT coalesce(
|
||||
jsonb_agg(jsonb_build_object(
|
||||
'id', id,
|
||||
'type', type,
|
||||
'type_options', type_options
|
||||
)),
|
||||
'[]'::jsonb
|
||||
)
|
||||
INTO _props
|
||||
FROM base_properties
|
||||
WHERE base_id = _base_id AND deleted_at IS NULL;
|
||||
PERFORM set_config(_cache_key, _props::text, true);
|
||||
ELSE
|
||||
_props := _cached::jsonb;
|
||||
END IF;
|
||||
|
||||
FOR _prop IN SELECT * FROM jsonb_array_elements(_props)
|
||||
LOOP
|
||||
IF (_prop->>'type') IN ('text', 'url', 'email') THEN
|
||||
_value := _cells->>(_prop->>'id');
|
||||
IF _value IS NOT NULL AND _value <> '' THEN
|
||||
_parts := array_append(_parts, _value);
|
||||
END IF;
|
||||
|
||||
ELSIF (_prop->>'type') IN ('select', 'status') THEN
|
||||
_value := _cells->>(_prop->>'id');
|
||||
IF _value IS NOT NULL AND _value <> '' THEN
|
||||
SELECT c->>'name' INTO _resolved
|
||||
FROM jsonb_array_elements(coalesce(_prop->'type_options'->'choices', '[]'::jsonb)) AS c
|
||||
WHERE c->>'id' = _value
|
||||
LIMIT 1;
|
||||
IF _resolved IS NOT NULL AND _resolved <> '' THEN
|
||||
_parts := array_append(_parts, _resolved);
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
ELSIF (_prop->>'type') = 'multiSelect' THEN
|
||||
_arr := _cells->(_prop->>'id');
|
||||
IF jsonb_typeof(_arr) = 'array' THEN
|
||||
FOR _elem IN SELECT * FROM jsonb_array_elements(_arr)
|
||||
LOOP
|
||||
SELECT c->>'name' INTO _resolved
|
||||
FROM jsonb_array_elements(coalesce(_prop->'type_options'->'choices', '[]'::jsonb)) AS c
|
||||
WHERE c->>'id' = _elem#>>'{}'
|
||||
LIMIT 1;
|
||||
IF _resolved IS NOT NULL AND _resolved <> '' THEN
|
||||
_parts := array_append(_parts, _resolved);
|
||||
END IF;
|
||||
END LOOP;
|
||||
END IF;
|
||||
END IF;
|
||||
END LOOP;
|
||||
|
||||
IF array_length(_parts, 1) IS NULL THEN
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
|
||||
RETURN f_unaccent(array_to_string(_parts, ' '));
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
// --- Row search trigger -----------------------------------------------
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_rows_search_trigger() RETURNS trigger
|
||||
LANGUAGE plpgsql AS $$
|
||||
BEGIN
|
||||
NEW.search_text := build_base_row_search_text(NEW.cells, NEW.base_id);
|
||||
NEW.search_tsv := to_tsvector('english', coalesce(NEW.search_text, ''));
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE TRIGGER base_rows_search_update
|
||||
BEFORE INSERT OR UPDATE ON base_rows
|
||||
FOR EACH ROW EXECUTE FUNCTION base_rows_search_trigger()
|
||||
`.execute(db);
|
||||
|
||||
// --- Indexes ----------------------------------------------------------
|
||||
|
||||
// Replace the default-opclass GIN created by the initial bases migration
|
||||
// with the smaller/faster jsonb_path_ops variant. No row-data backfill:
|
||||
// this branch is dev-only; the trigger populates search_text /
|
||||
// search_tsv on the next write to each row.
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_cells_gin`.execute(db);
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_cells_gin_path_ops
|
||||
ON base_rows USING gin (cells jsonb_path_ops)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
// Complementary default-opclass GIN so the `?` / `?|` / `?&` key-existence
|
||||
// operators are index-satisfiable — `jsonb_path_ops` above only covers
|
||||
// `@>`. Type-conversion and cell-GC paths filter `cells ? propertyId`;
|
||||
// without this the planner falls back to SEQ SCAN (~900ms on 100k rows).
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_cells_gin_keys
|
||||
ON base_rows USING gin (cells)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
// Workhorse for paginated list: (base_id, position, id) on live rows.
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_base_alive
|
||||
ON base_rows (base_id, position COLLATE "C", id)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
// Common "most recently edited" sort.
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_base_updated
|
||||
ON base_rows (base_id, updated_at DESC)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_base_created
|
||||
ON base_rows (base_id, created_at DESC)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
// Fulltext + trigram search indexes.
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_search_tsv
|
||||
ON base_rows USING gin (search_tsv)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_search_trgm
|
||||
ON base_rows USING gin (search_text gin_trgm_ops)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
|
||||
// Tenant-scoped scans defense-in-depth.
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_workspace
|
||||
ON base_rows (workspace_id, base_id)
|
||||
`.execute(db);
|
||||
|
||||
// Live properties per base (deleted_at partial).
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_properties_base_alive
|
||||
ON base_properties (base_id, position COLLATE "C", id)
|
||||
WHERE deleted_at IS NULL
|
||||
`.execute(db);
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<any>): Promise<void> {
|
||||
// --- Drop new indexes -------------------------------------------------
|
||||
await sql`DROP INDEX IF EXISTS idx_base_properties_base_alive`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_workspace`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_search_trgm`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_search_tsv`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_base_created`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_base_updated`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_base_alive`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_cells_gin_keys`.execute(db);
|
||||
await sql`DROP INDEX IF EXISTS idx_base_rows_cells_gin_path_ops`.execute(db);
|
||||
|
||||
// Restore the original GIN that the initial bases migration created.
|
||||
await sql`
|
||||
CREATE INDEX IF NOT EXISTS idx_base_rows_cells_gin
|
||||
ON base_rows USING gin (cells)
|
||||
`.execute(db);
|
||||
|
||||
// --- Drop trigger, trigger fn, helpers --------------------------------
|
||||
await sql`DROP TRIGGER IF EXISTS base_rows_search_update ON base_rows`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS base_rows_search_trigger()`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS build_base_row_search_text(jsonb, uuid)`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS jsonb_set_many(jsonb, jsonb)`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS base_cell_array(jsonb, uuid)`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS base_cell_bool(jsonb, uuid)`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS base_cell_timestamptz(jsonb, uuid)`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS base_cell_numeric(jsonb, uuid)`.execute(db);
|
||||
await sql`DROP FUNCTION IF EXISTS base_cell_text(jsonb, uuid)`.execute(db);
|
||||
|
||||
// --- Drop columns -----------------------------------------------------
|
||||
await sql`ALTER TABLE bases DROP COLUMN IF EXISTS schema_version`.execute(db);
|
||||
await sql`
|
||||
ALTER TABLE base_properties
|
||||
DROP COLUMN IF EXISTS deleted_at,
|
||||
DROP COLUMN IF EXISTS schema_version
|
||||
`.execute(db);
|
||||
await sql`
|
||||
ALTER TABLE base_rows
|
||||
DROP COLUMN IF EXISTS search_tsv,
|
||||
DROP COLUMN IF EXISTS search_text
|
||||
`.execute(db);
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
/*
|
||||
* Adds `pending_type` / `pending_type_options` to `base_properties` so
|
||||
* async type conversions can run without flipping the live type prematurely.
|
||||
* The worker swaps them onto `type` / `type_options` in the same
|
||||
* transaction that bumps schema_version, so clients never observe raw IDs
|
||||
* under a post-conversion type.
|
||||
*/
|
||||
export async function up(db: Kysely<any>): Promise<void> {
|
||||
await sql`
|
||||
ALTER TABLE base_properties
|
||||
ADD COLUMN IF NOT EXISTS pending_type varchar,
|
||||
ADD COLUMN IF NOT EXISTS pending_type_options jsonb
|
||||
`.execute(db);
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<any>): Promise<void> {
|
||||
await sql`
|
||||
ALTER TABLE base_properties
|
||||
DROP COLUMN IF EXISTS pending_type_options,
|
||||
DROP COLUMN IF EXISTS pending_type
|
||||
`.execute(db);
|
||||
}
|
||||
@@ -15,14 +15,15 @@ export class BasePropertyRepo {
|
||||
|
||||
async findById(
|
||||
propertyId: string,
|
||||
opts?: { trx?: KyselyTransaction },
|
||||
opts?: { trx?: KyselyTransaction; includeDeleted?: boolean },
|
||||
): Promise<BaseProperty | undefined> {
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
return db
|
||||
let qb = db
|
||||
.selectFrom('baseProperties')
|
||||
.selectAll()
|
||||
.where('id', '=', propertyId)
|
||||
.executeTakeFirst() as Promise<BaseProperty | undefined>;
|
||||
.where('id', '=', propertyId);
|
||||
if (!opts?.includeDeleted) qb = qb.where('deletedAt', 'is', null);
|
||||
return qb.executeTakeFirst() as Promise<BaseProperty | undefined>;
|
||||
}
|
||||
|
||||
async findByBaseId(
|
||||
@@ -34,6 +35,7 @@ export class BasePropertyRepo {
|
||||
.selectFrom('baseProperties')
|
||||
.selectAll()
|
||||
.where('baseId', '=', baseId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.orderBy('position', 'asc')
|
||||
.execute() as Promise<BaseProperty[]>;
|
||||
}
|
||||
@@ -78,7 +80,19 @@ export class BasePropertyRepo {
|
||||
.execute();
|
||||
}
|
||||
|
||||
async deleteProperty(
|
||||
async softDelete(
|
||||
propertyId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
await db
|
||||
.updateTable('baseProperties')
|
||||
.set({ deletedAt: new Date(), updatedAt: new Date() })
|
||||
.where('id', '=', propertyId)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async hardDelete(
|
||||
propertyId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<void> {
|
||||
@@ -88,4 +102,60 @@ export class BasePropertyRepo {
|
||||
.where('id', '=', propertyId)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async bumpSchemaVersion(
|
||||
propertyId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
await db
|
||||
.updateTable('baseProperties')
|
||||
.set({
|
||||
schemaVersion: sql`schema_version + 1`,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where('id', '=', propertyId)
|
||||
.execute();
|
||||
}
|
||||
|
||||
/*
|
||||
* Promotes `pending_type` / `pending_type_options` onto the live `type` /
|
||||
* `type_options` columns and clears the pending pair. No-op if no
|
||||
* conversion was pending. Caller is responsible for doing this inside the
|
||||
* same transaction as the cell rewrite so readers never see a
|
||||
* half-converted state.
|
||||
*/
|
||||
async commitPendingTypeChange(
|
||||
propertyId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
await db
|
||||
.updateTable('baseProperties')
|
||||
.set({
|
||||
type: sql`coalesce(pending_type, type)`,
|
||||
typeOptions: sql`coalesce(pending_type_options, type_options)`,
|
||||
pendingType: null,
|
||||
pendingTypeOptions: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where('id', '=', propertyId)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async clearPendingTypeChange(
|
||||
propertyId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
await db
|
||||
.updateTable('baseProperties')
|
||||
.set({
|
||||
pendingType: null,
|
||||
pendingTypeOptions: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where('id', '=', propertyId)
|
||||
.execute();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -7,21 +7,39 @@ import {
|
||||
InsertableBaseRow,
|
||||
} from '@docmost/db/types/entity.types';
|
||||
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
|
||||
import { executeWithCursorPagination } from '@docmost/db/pagination/cursor-pagination';
|
||||
import { sql, SelectQueryBuilder, SqlBool } from 'kysely';
|
||||
import { DB } from '@docmost/db/types/db';
|
||||
import {
|
||||
CursorPaginationResult,
|
||||
executeWithCursorPagination,
|
||||
} from '@docmost/db/pagination/cursor-pagination';
|
||||
import { sql, SqlBool } from 'kysely';
|
||||
import {
|
||||
FilterNode,
|
||||
PropertySchema,
|
||||
SearchSpec,
|
||||
SortSpec,
|
||||
runListQuery,
|
||||
} from '../../../core/base/engine';
|
||||
|
||||
const SYSTEM_COLUMN_MAP: Record<string, string> = {
|
||||
createdAt: 'createdAt',
|
||||
lastEditedAt: 'updatedAt',
|
||||
lastEditedBy: 'lastUpdatedById',
|
||||
};
|
||||
type RepoOpts = { trx?: KyselyTransaction };
|
||||
type WorkspaceOpts = { workspaceId: string } & RepoOpts;
|
||||
|
||||
const ARRAY_TYPES = new Set(['multiSelect', 'person', 'file']);
|
||||
|
||||
function escapeIlike(value: string): string {
|
||||
return value.replace(/[%_\\]/g, '\\$&');
|
||||
}
|
||||
// Columns that make up the public `BaseRow` shape.
|
||||
// `search_text` and `search_tsv` are internal fulltext-index columns
|
||||
// maintained by a trigger — they must never leak into API responses or
|
||||
// socket payloads. Every SELECT/RETURNING path in this repo references
|
||||
// this constant.
|
||||
const BASE_ROW_COLUMNS = [
|
||||
'id',
|
||||
'baseId',
|
||||
'cells',
|
||||
'position',
|
||||
'creatorId',
|
||||
'lastUpdatedById',
|
||||
'workspaceId',
|
||||
'createdAt',
|
||||
'updatedAt',
|
||||
'deletedAt',
|
||||
] as const;
|
||||
|
||||
@Injectable()
|
||||
export class BaseRowRepo {
|
||||
@@ -29,54 +47,82 @@ export class BaseRowRepo {
|
||||
|
||||
async findById(
|
||||
rowId: string,
|
||||
opts?: { trx?: KyselyTransaction },
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<BaseRow | undefined> {
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
return db
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
return (await db
|
||||
.selectFrom('baseRows')
|
||||
.selectAll()
|
||||
.select(BASE_ROW_COLUMNS)
|
||||
.where('id', '=', rowId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.executeTakeFirst() as Promise<BaseRow | undefined>;
|
||||
.executeTakeFirst()) as BaseRow | undefined;
|
||||
}
|
||||
|
||||
async findByBaseId(
|
||||
baseId: string,
|
||||
pagination: PaginationOptions,
|
||||
opts?: { trx?: KyselyTransaction },
|
||||
) {
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
async list(opts: {
|
||||
baseId: string;
|
||||
workspaceId: string;
|
||||
filter?: FilterNode;
|
||||
sorts?: SortSpec[];
|
||||
search?: SearchSpec;
|
||||
schema: PropertySchema;
|
||||
pagination: PaginationOptions;
|
||||
trx?: KyselyTransaction;
|
||||
}): Promise<CursorPaginationResult<BaseRow>> {
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
|
||||
const query = db
|
||||
const base = db
|
||||
.selectFrom('baseRows')
|
||||
.selectAll()
|
||||
.where('baseId', '=', baseId)
|
||||
.select(BASE_ROW_COLUMNS)
|
||||
.where('baseId', '=', opts.baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null);
|
||||
|
||||
return executeWithCursorPagination(query, {
|
||||
perPage: pagination.limit,
|
||||
cursor: pagination.cursor,
|
||||
beforeCursor: pagination.beforeCursor,
|
||||
fields: [
|
||||
{ expression: 'position', direction: 'asc' },
|
||||
{ expression: 'id', direction: 'asc' },
|
||||
],
|
||||
parseCursor: (cursor) => ({
|
||||
position: cursor.position,
|
||||
id: cursor.id,
|
||||
}),
|
||||
const hasFilterSortSearch =
|
||||
!!opts.filter || (opts.sorts && opts.sorts.length > 0) || !!opts.search;
|
||||
|
||||
if (!hasFilterSortSearch) {
|
||||
// Fast path: keyset-paginated list ordered by (position COLLATE "C", id)
|
||||
// to match idx_base_rows_base_alive. Without the collation hint the
|
||||
// planner falls back to a Sort node on every page.
|
||||
return executeWithCursorPagination(base as any, {
|
||||
perPage: opts.pagination.limit,
|
||||
cursor: opts.pagination.cursor,
|
||||
beforeCursor: opts.pagination.beforeCursor,
|
||||
fields: [
|
||||
{
|
||||
expression: sql`position COLLATE "C"`,
|
||||
direction: 'asc',
|
||||
key: 'position',
|
||||
},
|
||||
{ expression: 'id', direction: 'asc', key: 'id' },
|
||||
],
|
||||
parseCursor: (c) => ({
|
||||
position: c.position,
|
||||
id: c.id,
|
||||
}),
|
||||
} as any) as unknown as Promise<CursorPaginationResult<BaseRow>>;
|
||||
}
|
||||
|
||||
return runListQuery(base as any, {
|
||||
filter: opts.filter,
|
||||
sorts: opts.sorts,
|
||||
search: opts.search,
|
||||
schema: opts.schema,
|
||||
pagination: opts.pagination,
|
||||
});
|
||||
}
|
||||
|
||||
async getLastPosition(
|
||||
baseId: string,
|
||||
trx?: KyselyTransaction,
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<string | null> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
const result = await db
|
||||
.selectFrom('baseRows')
|
||||
.select('position')
|
||||
.where('baseId', '=', baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.orderBy(sql`position COLLATE "C"`, sql`DESC`)
|
||||
.limit(1)
|
||||
@@ -86,425 +132,199 @@ export class BaseRowRepo {
|
||||
|
||||
async insertRow(
|
||||
row: InsertableBaseRow,
|
||||
trx?: KyselyTransaction,
|
||||
opts?: RepoOpts,
|
||||
): Promise<BaseRow> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
return db
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
return (await db
|
||||
.insertInto('baseRows')
|
||||
.values(row)
|
||||
.returningAll()
|
||||
.executeTakeFirstOrThrow() as Promise<BaseRow>;
|
||||
.returning(BASE_ROW_COLUMNS)
|
||||
.executeTakeFirstOrThrow()) as BaseRow;
|
||||
}
|
||||
|
||||
/*
|
||||
* Merges `patch` into the row's cells via `jsonb_set_many` and returns
|
||||
* the updated row (public columns only — search_text/search_tsv are
|
||||
* excluded from RETURNING). Single round-trip; replaces the old
|
||||
* "updateCells + findById" two-query dance.
|
||||
*/
|
||||
async updateCells(
|
||||
rowId: string,
|
||||
cells: Record<string, unknown>,
|
||||
userId?: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
await db
|
||||
patch: Record<string, unknown>,
|
||||
opts: {
|
||||
baseId: string;
|
||||
workspaceId: string;
|
||||
actorId?: string;
|
||||
trx?: KyselyTransaction;
|
||||
},
|
||||
): Promise<BaseRow | undefined> {
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
// Cast through text because postgres.js auto-detects a JSON-shaped
|
||||
// string as jsonb and re-encodes it, producing a jsonb *string* instead
|
||||
// of an object — which `jsonb_set_many` then treats as a no-op.
|
||||
const patchJson = JSON.stringify(patch);
|
||||
return (await db
|
||||
.updateTable('baseRows')
|
||||
.set({
|
||||
cells: sql`cells || ${cells}`,
|
||||
cells: sql`jsonb_set_many(cells, ${patchJson}::text::jsonb)`,
|
||||
updatedAt: new Date(),
|
||||
lastUpdatedById: userId ?? null,
|
||||
lastUpdatedById: opts.actorId ?? null,
|
||||
})
|
||||
.where('id', '=', rowId)
|
||||
.where('baseId', '=', opts.baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.execute();
|
||||
.returning(BASE_ROW_COLUMNS)
|
||||
.executeTakeFirst()) as BaseRow | undefined;
|
||||
}
|
||||
|
||||
async updatePosition(
|
||||
rowId: string,
|
||||
position: string,
|
||||
trx?: KyselyTransaction,
|
||||
opts: {
|
||||
baseId: string;
|
||||
workspaceId: string;
|
||||
trx?: KyselyTransaction;
|
||||
},
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
await db
|
||||
.updateTable('baseRows')
|
||||
.set({ position, updatedAt: new Date() })
|
||||
.where('id', '=', rowId)
|
||||
.where('baseId', '=', opts.baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async softDelete(rowId: string, trx?: KyselyTransaction): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
async softDelete(
|
||||
rowId: string,
|
||||
opts: {
|
||||
baseId: string;
|
||||
workspaceId: string;
|
||||
trx?: KyselyTransaction;
|
||||
},
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
await db
|
||||
.updateTable('baseRows')
|
||||
.set({ deletedAt: new Date() })
|
||||
.where('id', '=', rowId)
|
||||
.where('baseId', '=', opts.baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async removeCellKey(
|
||||
baseId: string,
|
||||
propertyId: string,
|
||||
trx?: KyselyTransaction,
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
await db
|
||||
.updateTable('baseRows')
|
||||
.set({
|
||||
cells: sql`cells - ${propertyId}`,
|
||||
cells: sql`cells - ${propertyId}::text`,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where('baseId', '=', baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async findAllByBaseId(
|
||||
/*
|
||||
* Streams every live row of a base in deterministic order via keyset
|
||||
* pagination so async jobs (type-conversion, cell-gc, export) can process
|
||||
* large bases without loading the full set into memory.
|
||||
*
|
||||
* `withCellKey` restricts the scan to rows whose cell jsonb contains
|
||||
* that top-level key. Type-conversion callers pass the property ID so
|
||||
* we don't drag 100k empty rows through Node just to rewrite a dozen.
|
||||
*/
|
||||
async *streamByBaseId(
|
||||
baseId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<BaseRow[]> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
return db
|
||||
.selectFrom('baseRows')
|
||||
.selectAll()
|
||||
.where('baseId', '=', baseId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.execute() as Promise<BaseRow[]>;
|
||||
opts: {
|
||||
workspaceId: string;
|
||||
chunkSize?: number;
|
||||
trx?: KyselyTransaction;
|
||||
withCellKey?: string;
|
||||
},
|
||||
): AsyncGenerator<BaseRow[], void, void> {
|
||||
const chunkSize = opts.chunkSize ?? 1000;
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
let afterPosition: string | null = null;
|
||||
let afterId: string | null = null;
|
||||
|
||||
while (true) {
|
||||
let qb = db
|
||||
.selectFrom('baseRows')
|
||||
.select(BASE_ROW_COLUMNS)
|
||||
.where('baseId', '=', baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.where('deletedAt', 'is', null)
|
||||
.orderBy(sql`position COLLATE "C"`, 'asc')
|
||||
.orderBy('id', 'asc')
|
||||
.limit(chunkSize);
|
||||
|
||||
if (opts.withCellKey) {
|
||||
qb = qb.where(sql<SqlBool>`cells ? ${opts.withCellKey}`);
|
||||
}
|
||||
|
||||
if (afterPosition !== null && afterId !== null) {
|
||||
qb = qb.where((eb) =>
|
||||
eb.or([
|
||||
eb(sql`position COLLATE "C"`, '>', afterPosition!),
|
||||
eb.and([
|
||||
eb(sql`position COLLATE "C"`, '=', afterPosition!),
|
||||
eb('id', '>', afterId!),
|
||||
]),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
const chunk = (await qb.execute()) as BaseRow[];
|
||||
if (chunk.length === 0) return;
|
||||
yield chunk;
|
||||
if (chunk.length < chunkSize) return;
|
||||
const last = chunk[chunk.length - 1];
|
||||
afterPosition = last.position;
|
||||
afterId = last.id;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Real batch: one `UPDATE ... FROM (SELECT unnest($ids), unnest($patches))`
|
||||
* per call. Callers chunk (typically 1000 per call) from inside a BullMQ
|
||||
* job. `cells` is merged via `jsonb_set_many` so only touched subtrees
|
||||
* rewrite.
|
||||
*/
|
||||
async batchUpdateCells(
|
||||
updates: Array<{ id: string; cells: Record<string, unknown> }>,
|
||||
trx?: KyselyTransaction,
|
||||
updates: Array<{ id: string; patch: Record<string, unknown> }>,
|
||||
opts: {
|
||||
baseId: string;
|
||||
workspaceId: string;
|
||||
actorId?: string;
|
||||
trx?: KyselyTransaction;
|
||||
},
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
for (const update of updates) {
|
||||
await db
|
||||
.updateTable('baseRows')
|
||||
.set({
|
||||
cells: sql`cells || ${update.cells}`,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where('id', '=', update.id)
|
||||
.execute();
|
||||
}
|
||||
}
|
||||
if (updates.length === 0) return;
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
|
||||
async findByBaseIdFiltered(
|
||||
baseId: string,
|
||||
filters: Array<{ propertyId: string; operator: string; value?: unknown }>,
|
||||
sorts: Array<{ propertyId: string; direction: string }>,
|
||||
propertyTypeMap: Map<string, string>,
|
||||
pagination: PaginationOptions,
|
||||
opts?: { trx?: KyselyTransaction },
|
||||
) {
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
const ids = updates.map((u) => u.id);
|
||||
const patches = updates.map((u) => JSON.stringify(u.patch));
|
||||
|
||||
let query = db
|
||||
.selectFrom('baseRows')
|
||||
.selectAll()
|
||||
.where('baseId', '=', baseId)
|
||||
.where('deletedAt', 'is', null) as SelectQueryBuilder<DB, 'baseRows', any>;
|
||||
|
||||
// Apply filters
|
||||
for (const filter of filters) {
|
||||
query = this.applyFilter(query, filter, propertyTypeMap);
|
||||
}
|
||||
|
||||
// Build cursor-compatible sort fields.
|
||||
// COALESCE sort expressions so NULLs never reach the cursor encoder/comparator.
|
||||
// ASC NULLS LAST → COALESCE(expr, <high sentinel>)
|
||||
// DESC NULLS LAST → COALESCE(expr, <low sentinel>)
|
||||
const sortMeta: Array<{
|
||||
alias: string;
|
||||
expression: ReturnType<typeof sql>;
|
||||
direction: 'asc' | 'desc';
|
||||
isNumeric: boolean;
|
||||
}> = [];
|
||||
|
||||
for (let i = 0; i < sorts.length; i++) {
|
||||
const sort = sorts[i];
|
||||
const type = propertyTypeMap.get(sort.propertyId);
|
||||
if (!type) continue;
|
||||
|
||||
const dir = (sort.direction === 'desc' ? 'desc' : 'asc') as 'asc' | 'desc';
|
||||
const alias = `s${i}`;
|
||||
let expression: ReturnType<typeof sql>;
|
||||
let isNumeric = false;
|
||||
|
||||
const systemCol = SYSTEM_COLUMN_MAP[type];
|
||||
if (systemCol) {
|
||||
// System columns (createdAt, updatedAt) are NOT NULL — no COALESCE needed
|
||||
expression = sql`"${sql.raw(systemCol)}"`;
|
||||
} else if (type === 'number') {
|
||||
isNumeric = true;
|
||||
const sentinel = dir === 'asc' ? "'Infinity'::numeric" : "'-Infinity'::numeric";
|
||||
expression = sql`COALESCE((cells->>'${sql.raw(sort.propertyId)}')::numeric, ${sql.raw(sentinel)})`;
|
||||
} else {
|
||||
// Text, date, select, etc.
|
||||
const sentinel = dir === 'asc' ? 'chr(1114111)' : "''";
|
||||
expression = sql`COALESCE(cells->>'${sql.raw(sort.propertyId)}', ${sql.raw(sentinel)})`;
|
||||
}
|
||||
|
||||
sortMeta.push({ alias, expression, direction: dir, isNumeric });
|
||||
query = query.select(expression.as(alias)) as any;
|
||||
}
|
||||
|
||||
// Cursor pagination fields: sort aliases + position + id tiebreakers.
|
||||
// executeWithCursorPagination applies ORDER BY and builds the keyset WHERE from these.
|
||||
const fields = [
|
||||
...sortMeta.map(({ alias, expression, direction }) => ({
|
||||
expression,
|
||||
direction,
|
||||
key: alias,
|
||||
})),
|
||||
{ expression: 'position' as any, direction: 'asc' as const, key: 'position' },
|
||||
{ expression: 'id' as any, direction: 'asc' as const, key: 'id' },
|
||||
];
|
||||
|
||||
return executeWithCursorPagination(query as any, {
|
||||
perPage: pagination.limit,
|
||||
cursor: pagination.cursor,
|
||||
beforeCursor: pagination.beforeCursor,
|
||||
fields: fields as any,
|
||||
encodeCursor: (values: Array<[string, unknown]>) => {
|
||||
const cursor = new URLSearchParams();
|
||||
for (const [key, value] of values) {
|
||||
if (value === null || value === undefined) {
|
||||
cursor.set(key, '__null__');
|
||||
} else if (value instanceof Date) {
|
||||
cursor.set(key, value.toISOString());
|
||||
} else {
|
||||
cursor.set(key, String(value));
|
||||
}
|
||||
}
|
||||
return Buffer.from(cursor.toString(), 'utf8').toString('base64url');
|
||||
},
|
||||
decodeCursor: (cursorStr: string, fieldNames: string[]) => {
|
||||
const parsed = new URLSearchParams(
|
||||
Buffer.from(cursorStr, 'base64url').toString('utf8'),
|
||||
);
|
||||
const result: Record<string, string> = {};
|
||||
for (const name of fieldNames) {
|
||||
result[name] = parsed.get(name) ?? '';
|
||||
}
|
||||
return result;
|
||||
},
|
||||
parseCursor: (decoded: any) => {
|
||||
const result: Record<string, unknown> = {};
|
||||
for (const { alias, isNumeric } of sortMeta) {
|
||||
const val = decoded[alias];
|
||||
if (val === '__null__') {
|
||||
result[alias] = null;
|
||||
} else {
|
||||
result[alias] = isNumeric ? parseFloat(val) : val;
|
||||
}
|
||||
}
|
||||
result.position = decoded.position;
|
||||
result.id = decoded.id;
|
||||
return result;
|
||||
},
|
||||
} as any);
|
||||
}
|
||||
|
||||
private applyFilter(
|
||||
query: SelectQueryBuilder<DB, 'baseRows', any>,
|
||||
filter: { propertyId: string; operator: string; value?: unknown },
|
||||
propertyTypeMap: Map<string, string>,
|
||||
): SelectQueryBuilder<DB, 'baseRows', any> {
|
||||
const { propertyId, operator, value } = filter;
|
||||
const propertyType = propertyTypeMap.get(propertyId);
|
||||
if (!propertyType) return query;
|
||||
|
||||
// System property -> use actual column
|
||||
const systemCol = SYSTEM_COLUMN_MAP[propertyType];
|
||||
if (systemCol) {
|
||||
return this.applyColumnFilter(query, systemCol, operator, value, propertyType);
|
||||
}
|
||||
|
||||
const isArray = ARRAY_TYPES.has(propertyType);
|
||||
|
||||
// isEmpty / isNotEmpty don't need a value
|
||||
if (operator === 'isEmpty') {
|
||||
if (isArray) {
|
||||
return query.where(({ or, eb }) =>
|
||||
or([
|
||||
eb(sql.raw(`cells->'${propertyId}'`), 'is', null),
|
||||
eb(sql`jsonb_array_length(cells->'${sql.raw(propertyId)}')`, '=', 0),
|
||||
]),
|
||||
);
|
||||
}
|
||||
return query.where(({ or, eb }) =>
|
||||
or([
|
||||
eb(sql.raw(`cells->>'${propertyId}'`), 'is', null),
|
||||
eb(sql.raw(`cells->>'${propertyId}'`), '=', ''),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
if (operator === 'isNotEmpty') {
|
||||
if (isArray) {
|
||||
return query
|
||||
.where(sql.raw(`cells->'${propertyId}'`), 'is not', null)
|
||||
.where(sql`jsonb_array_length(cells->'${sql.raw(propertyId)}')`, '>', 0);
|
||||
}
|
||||
return query
|
||||
.where(sql.raw(`cells->>'${propertyId}'`), 'is not', null)
|
||||
.where(sql.raw(`cells->>'${propertyId}'`), '!=', '');
|
||||
}
|
||||
|
||||
if (value === undefined || value === null) return query;
|
||||
|
||||
// contains / notContains - text search
|
||||
if (operator === 'contains') {
|
||||
return query.where(
|
||||
sql.raw(`cells->>'${propertyId}'`),
|
||||
'ilike',
|
||||
`%${escapeIlike(String(value))}%`,
|
||||
);
|
||||
}
|
||||
if (operator === 'notContains') {
|
||||
return query.where(({ or, eb }) =>
|
||||
or([
|
||||
eb(sql.raw(`cells->>'${propertyId}'`), 'is', null),
|
||||
eb(
|
||||
sql.raw(`cells->>'${propertyId}'`),
|
||||
'not ilike',
|
||||
`%${escapeIlike(String(value))}%`,
|
||||
),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
// equals / notEquals
|
||||
if (operator === 'equals') {
|
||||
if (isArray) {
|
||||
return query.where(
|
||||
sql<SqlBool>`cells->'${sql.raw(propertyId)}' @> ${JSON.stringify([value])}::jsonb`,
|
||||
);
|
||||
}
|
||||
if (propertyType === 'number') {
|
||||
return query.where(
|
||||
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric = ${Number(value)}`,
|
||||
);
|
||||
}
|
||||
if (propertyType === 'checkbox') {
|
||||
return query.where(
|
||||
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::boolean = ${Boolean(value)}`,
|
||||
);
|
||||
}
|
||||
return query.where(sql.raw(`cells->>'${propertyId}'`), '=', String(value));
|
||||
}
|
||||
|
||||
if (operator === 'notEquals') {
|
||||
if (isArray) {
|
||||
return query.where(({ or, eb }) =>
|
||||
or([
|
||||
eb(sql.raw(`cells->'${propertyId}'`), 'is', null),
|
||||
sql<SqlBool>`NOT (cells->'${sql.raw(propertyId)}' @> ${JSON.stringify([value])}::jsonb)`,
|
||||
]),
|
||||
);
|
||||
}
|
||||
if (propertyType === 'number') {
|
||||
return query.where(
|
||||
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric != ${Number(value)}`,
|
||||
);
|
||||
}
|
||||
if (propertyType === 'checkbox') {
|
||||
return query.where(
|
||||
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::boolean != ${Boolean(value)}`,
|
||||
);
|
||||
}
|
||||
return query.where(({ or, eb }) =>
|
||||
or([
|
||||
eb(sql.raw(`cells->>'${propertyId}'`), 'is', null),
|
||||
eb(sql.raw(`cells->>'${propertyId}'`), '!=', String(value)),
|
||||
]),
|
||||
);
|
||||
}
|
||||
|
||||
// greaterThan / lessThan - number
|
||||
if (operator === 'greaterThan') {
|
||||
return query.where(
|
||||
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric > ${Number(value)}`,
|
||||
);
|
||||
}
|
||||
if (operator === 'lessThan') {
|
||||
return query.where(
|
||||
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric < ${Number(value)}`,
|
||||
);
|
||||
}
|
||||
|
||||
// before / after - date
|
||||
if (operator === 'before') {
|
||||
return query.where(sql.raw(`cells->>'${propertyId}'`), '<', String(value));
|
||||
}
|
||||
if (operator === 'after') {
|
||||
return query.where(sql.raw(`cells->>'${propertyId}'`), '>', String(value));
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
private applyColumnFilter(
|
||||
query: SelectQueryBuilder<DB, 'baseRows', any>,
|
||||
column: string,
|
||||
operator: string,
|
||||
value: unknown,
|
||||
propertyType: string,
|
||||
): SelectQueryBuilder<DB, 'baseRows', any> {
|
||||
if (operator === 'isEmpty') {
|
||||
return query.where(sql.raw(`"${column}"`), 'is', null);
|
||||
}
|
||||
if (operator === 'isNotEmpty') {
|
||||
return query.where(sql.raw(`"${column}"`), 'is not', null);
|
||||
}
|
||||
|
||||
if (value === undefined || value === null) return query;
|
||||
|
||||
if (operator === 'equals') {
|
||||
return query.where(sql.raw(`"${column}"`), '=', value);
|
||||
}
|
||||
if (operator === 'notEquals') {
|
||||
return query.where(({ or, eb }) =>
|
||||
or([
|
||||
eb(sql.raw(`"${column}"`), 'is', null),
|
||||
eb(sql.raw(`"${column}"`), '!=', value),
|
||||
]),
|
||||
);
|
||||
}
|
||||
if (operator === 'before') {
|
||||
return query.where(sql.raw(`"${column}"`), '<', value);
|
||||
}
|
||||
if (operator === 'after') {
|
||||
return query.where(sql.raw(`"${column}"`), '>', value);
|
||||
}
|
||||
|
||||
return query;
|
||||
}
|
||||
|
||||
private applySort(
|
||||
query: SelectQueryBuilder<DB, 'baseRows', any>,
|
||||
sort: { propertyId: string; direction: string },
|
||||
propertyTypeMap: Map<string, string>,
|
||||
): SelectQueryBuilder<DB, 'baseRows', any> {
|
||||
const { propertyId, direction } = sort;
|
||||
const propertyType = propertyTypeMap.get(propertyId);
|
||||
if (!propertyType) return query;
|
||||
|
||||
const dir = direction === 'desc' ? 'desc' : 'asc';
|
||||
|
||||
// System property -> use actual column
|
||||
const systemCol = SYSTEM_COLUMN_MAP[propertyType];
|
||||
if (systemCol) {
|
||||
return query.orderBy(sql.raw(`"${systemCol}"`), sql`${sql.raw(dir)} NULLS LAST`);
|
||||
}
|
||||
|
||||
// Number properties: cast to numeric for proper numeric ordering
|
||||
if (propertyType === 'number') {
|
||||
return query.orderBy(
|
||||
sql`(cells->>'${sql.raw(propertyId)}')::numeric`,
|
||||
sql`${sql.raw(dir)} NULLS LAST`,
|
||||
);
|
||||
}
|
||||
|
||||
// All other properties: use text extraction
|
||||
return query.orderBy(
|
||||
sql.raw(`cells->>'${propertyId}'`),
|
||||
sql`${sql.raw(dir)} NULLS LAST`,
|
||||
);
|
||||
await sql`
|
||||
UPDATE base_rows AS r
|
||||
SET cells = jsonb_set_many(r.cells, u.patch::jsonb),
|
||||
updated_at = now(),
|
||||
last_updated_by_id = coalesce(${opts.actorId ?? null}, r.last_updated_by_id)
|
||||
FROM unnest(${ids}::uuid[], ${patches}::text[]) AS u(row_id, patch)
|
||||
WHERE r.id = u.row_id
|
||||
AND r.base_id = ${opts.baseId}
|
||||
AND r.workspace_id = ${opts.workspaceId}
|
||||
AND r.deleted_at IS NULL
|
||||
`.execute(db);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,57 +9,64 @@ import {
|
||||
} from '@docmost/db/types/entity.types';
|
||||
import { sql } from 'kysely';
|
||||
|
||||
type RepoOpts = { trx?: KyselyTransaction };
|
||||
type WorkspaceOpts = { workspaceId: string } & RepoOpts;
|
||||
|
||||
@Injectable()
|
||||
export class BaseViewRepo {
|
||||
constructor(@InjectKysely() private readonly db: KyselyDB) {}
|
||||
|
||||
async findById(
|
||||
viewId: string,
|
||||
opts?: { trx?: KyselyTransaction },
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<BaseView | undefined> {
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
return db
|
||||
.selectFrom('baseViews')
|
||||
.selectAll()
|
||||
.where('id', '=', viewId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.executeTakeFirst() as Promise<BaseView | undefined>;
|
||||
}
|
||||
|
||||
async findByBaseId(
|
||||
baseId: string,
|
||||
opts?: { trx?: KyselyTransaction },
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<BaseView[]> {
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
return db
|
||||
.selectFrom('baseViews')
|
||||
.selectAll()
|
||||
.where('baseId', '=', baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.orderBy('position', 'asc')
|
||||
.execute() as Promise<BaseView[]>;
|
||||
}
|
||||
|
||||
async countByBaseId(
|
||||
baseId: string,
|
||||
trx?: KyselyTransaction,
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<number> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
const result = await db
|
||||
.selectFrom('baseViews')
|
||||
.select((eb) => eb.fn.countAll<number>().as('count'))
|
||||
.where('baseId', '=', baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.executeTakeFirstOrThrow();
|
||||
return Number(result.count);
|
||||
}
|
||||
|
||||
async getLastPosition(
|
||||
baseId: string,
|
||||
trx?: KyselyTransaction,
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<string | null> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
const result = await db
|
||||
.selectFrom('baseViews')
|
||||
.select('position')
|
||||
.where('baseId', '=', baseId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.orderBy(sql`position COLLATE "C"`, sql`DESC`)
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
@@ -68,9 +75,9 @@ export class BaseViewRepo {
|
||||
|
||||
async insertView(
|
||||
view: InsertableBaseView,
|
||||
trx?: KyselyTransaction,
|
||||
opts?: RepoOpts,
|
||||
): Promise<BaseView> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts?.trx);
|
||||
return db
|
||||
.insertInto('baseViews')
|
||||
.values(view)
|
||||
@@ -81,24 +88,26 @@ export class BaseViewRepo {
|
||||
async updateView(
|
||||
viewId: string,
|
||||
data: UpdatableBaseView,
|
||||
trx?: KyselyTransaction,
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
await db
|
||||
.updateTable('baseViews')
|
||||
.set({ ...data, updatedAt: new Date() })
|
||||
.where('id', '=', viewId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.execute();
|
||||
}
|
||||
|
||||
async deleteView(
|
||||
viewId: string,
|
||||
trx?: KyselyTransaction,
|
||||
opts: WorkspaceOpts,
|
||||
): Promise<void> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const db = dbOrTx(this.db, opts.trx);
|
||||
await db
|
||||
.deleteFrom('baseViews')
|
||||
.where('id', '=', viewId)
|
||||
.where('workspaceId', '=', opts.workspaceId)
|
||||
.execute();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
} from '@docmost/db/types/entity.types';
|
||||
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
|
||||
import { executeWithCursorPagination } from '@docmost/db/pagination/cursor-pagination';
|
||||
import { ExpressionBuilder } from 'kysely';
|
||||
import { ExpressionBuilder, sql } from 'kysely';
|
||||
import { DB } from '@docmost/db/types/db';
|
||||
import { jsonArrayFrom } from 'kysely/helpers/postgres';
|
||||
|
||||
@@ -120,6 +120,23 @@ export class BaseRepo {
|
||||
.execute();
|
||||
}
|
||||
|
||||
async bumpSchemaVersion(
|
||||
baseId: string,
|
||||
trx?: KyselyTransaction,
|
||||
): Promise<number> {
|
||||
const db = dbOrTx(this.db, trx);
|
||||
const result = await db
|
||||
.updateTable('bases')
|
||||
.set({
|
||||
schemaVersion: sql`schema_version + 1`,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where('id', '=', baseId)
|
||||
.returning('schemaVersion')
|
||||
.executeTakeFirst();
|
||||
return result?.schemaVersion ?? 0;
|
||||
}
|
||||
|
||||
private withProperties(eb: ExpressionBuilder<DB, 'bases'>) {
|
||||
return jsonArrayFrom(
|
||||
eb
|
||||
|
||||
+7
@@ -434,6 +434,7 @@ export interface Bases {
|
||||
createdAt: Generated<Timestamp>;
|
||||
updatedAt: Generated<Timestamp>;
|
||||
deletedAt: Timestamp | null;
|
||||
schemaVersion: Generated<number>;
|
||||
}
|
||||
|
||||
export interface BaseProperties {
|
||||
@@ -443,10 +444,14 @@ export interface BaseProperties {
|
||||
type: string;
|
||||
position: string;
|
||||
typeOptions: Json | null;
|
||||
pendingType: string | null;
|
||||
pendingTypeOptions: Json | null;
|
||||
isPrimary: Generated<boolean>;
|
||||
workspaceId: string;
|
||||
createdAt: Generated<Timestamp>;
|
||||
updatedAt: Generated<Timestamp>;
|
||||
schemaVersion: Generated<number>;
|
||||
deletedAt: Timestamp | null;
|
||||
}
|
||||
|
||||
export interface BaseRows {
|
||||
@@ -460,6 +465,8 @@ export interface BaseRows {
|
||||
createdAt: Generated<Timestamp>;
|
||||
updatedAt: Generated<Timestamp>;
|
||||
deletedAt: Timestamp | null;
|
||||
searchText: string | null;
|
||||
searchTsv: string | null;
|
||||
}
|
||||
|
||||
export interface BaseViews {
|
||||
|
||||
@@ -223,9 +223,18 @@ export type InsertableBaseProperty = Insertable<BaseProperties>;
|
||||
export type UpdatableBaseProperty = Updateable<Omit<BaseProperties, 'id'>>;
|
||||
|
||||
// Base Row
|
||||
export type BaseRow = Selectable<BaseRows>;
|
||||
export type InsertableBaseRow = Insertable<BaseRows>;
|
||||
export type UpdatableBaseRow = Updateable<Omit<BaseRows, 'id'>>;
|
||||
// `searchText` and `searchTsv` are internal fulltext-index columns maintained
|
||||
// by a trigger. They are omitted from the public types so they never leak into
|
||||
// HTTP responses or write payloads.
|
||||
export type BaseRow = Omit<Selectable<BaseRows>, 'searchText' | 'searchTsv'>;
|
||||
export type InsertableBaseRow = Omit<
|
||||
Insertable<BaseRows>,
|
||||
'searchText' | 'searchTsv'
|
||||
>;
|
||||
export type UpdatableBaseRow = Omit<
|
||||
Updateable<Omit<BaseRows, 'id'>>,
|
||||
'searchText' | 'searchTsv'
|
||||
>;
|
||||
|
||||
// Base View
|
||||
export type BaseView = Selectable<BaseViews>;
|
||||
|
||||
@@ -9,6 +9,7 @@ export enum QueueName {
|
||||
HISTORY_QUEUE = '{history-queue}',
|
||||
NOTIFICATION_QUEUE = '{notification-queue}',
|
||||
AUDIT_QUEUE = '{audit-queue}',
|
||||
BASE_QUEUE = '{base-queue}',
|
||||
}
|
||||
|
||||
export enum QueueJob {
|
||||
@@ -83,4 +84,7 @@ export enum QueueJob {
|
||||
|
||||
PDF_EXPORT_TASK = 'pdf-export-task',
|
||||
PDF_EXPORT_CLEANUP = 'pdf-export-cleanup',
|
||||
|
||||
BASE_TYPE_CONVERSION = 'base-type-conversion',
|
||||
BASE_CELL_GC = 'base-cell-gc',
|
||||
}
|
||||
|
||||
@@ -113,3 +113,27 @@ export interface IApprovalRejectedNotificationJob {
|
||||
requestedById: string;
|
||||
comment?: string;
|
||||
}
|
||||
|
||||
export interface IBaseTypeConversionJob {
|
||||
baseId: string;
|
||||
propertyId: string;
|
||||
workspaceId: string;
|
||||
fromType: string;
|
||||
toType: string;
|
||||
// Snapshots taken at enqueue time so the job stays correct even if the
|
||||
// property's current typeOptions drift while the job waits in the queue.
|
||||
fromTypeOptions: unknown;
|
||||
toTypeOptions: unknown;
|
||||
// When true, the job nulls the cell values for that property instead of
|
||||
// attempting a value conversion. Used for any conversion where the new
|
||||
// type has no meaningful representation of the old value (e.g. involving
|
||||
// a system type).
|
||||
clearMode: boolean;
|
||||
actorId?: string;
|
||||
}
|
||||
|
||||
export interface IBaseCellGcJob {
|
||||
baseId: string;
|
||||
propertyId: string;
|
||||
workspaceId: string;
|
||||
}
|
||||
|
||||
@@ -92,6 +92,14 @@ import { GeneralQueueProcessor } from './processors/general-queue.processor';
|
||||
attempts: 3,
|
||||
},
|
||||
}),
|
||||
BullModule.registerQueue({
|
||||
name: QueueName.BASE_QUEUE,
|
||||
defaultJobOptions: {
|
||||
attempts: 2,
|
||||
removeOnComplete: { count: 200 },
|
||||
removeOnFail: { count: 100 },
|
||||
},
|
||||
}),
|
||||
],
|
||||
exports: [BullModule],
|
||||
providers: [GeneralQueueProcessor],
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import {
|
||||
MessageBody,
|
||||
OnGatewayConnection,
|
||||
OnGatewayDisconnect,
|
||||
OnGatewayInit,
|
||||
SubscribeMessage,
|
||||
WebSocketGateway,
|
||||
@@ -13,6 +14,7 @@ import { OnModuleDestroy } from '@nestjs/common';
|
||||
import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
|
||||
import { WsService } from './ws.service';
|
||||
import { getSpaceRoomName, getUserRoomName } from './ws.utils';
|
||||
import { BaseWsService } from '../core/base/realtime/base-ws.service';
|
||||
import * as cookie from 'cookie';
|
||||
|
||||
@WebSocketGateway({
|
||||
@@ -20,7 +22,11 @@ import * as cookie from 'cookie';
|
||||
transports: ['websocket'],
|
||||
})
|
||||
export class WsGateway
|
||||
implements OnGatewayConnection, OnGatewayInit, OnModuleDestroy
|
||||
implements
|
||||
OnGatewayConnection,
|
||||
OnGatewayDisconnect,
|
||||
OnGatewayInit,
|
||||
OnModuleDestroy
|
||||
{
|
||||
@WebSocketServer()
|
||||
server: Server;
|
||||
@@ -29,10 +35,12 @@ export class WsGateway
|
||||
private tokenService: TokenService,
|
||||
private spaceMemberRepo: SpaceMemberRepo,
|
||||
private wsService: WsService,
|
||||
private baseWsService: BaseWsService,
|
||||
) {}
|
||||
|
||||
afterInit(server: Server): void {
|
||||
this.wsService.setServer(server);
|
||||
this.baseWsService.setServer(server);
|
||||
}
|
||||
|
||||
async handleConnection(client: Socket, ...args: any[]): Promise<void> {
|
||||
@@ -47,6 +55,7 @@ export class WsGateway
|
||||
const workspaceId = token.workspaceId;
|
||||
|
||||
client.data.userId = userId;
|
||||
client.data.workspaceId = workspaceId;
|
||||
|
||||
const userSpaceIds = await this.spaceMemberRepo.getUserSpaceIds(userId);
|
||||
|
||||
@@ -61,10 +70,19 @@ export class WsGateway
|
||||
}
|
||||
}
|
||||
|
||||
async handleDisconnect(client: Socket): Promise<void> {
|
||||
await this.baseWsService.handleDisconnect(client);
|
||||
}
|
||||
|
||||
@SubscribeMessage('message')
|
||||
async handleMessage(client: Socket, data: any): Promise<void> {
|
||||
if (this.wsService.isTreeEvent(data)) {
|
||||
await this.wsService.handleTreeEvent(client, data);
|
||||
return;
|
||||
}
|
||||
if (this.baseWsService.isBaseEvent(data)) {
|
||||
await this.baseWsService.handleInbound(client, data);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,10 +3,11 @@ import { WsGateway } from './ws.gateway';
|
||||
import { WsService } from './ws.service';
|
||||
import { WsTreeService } from './ws-tree.service';
|
||||
import { TokenModule } from '../core/auth/token.module';
|
||||
import { BaseModule } from '../core/base/base.module';
|
||||
|
||||
@Global()
|
||||
@Module({
|
||||
imports: [TokenModule],
|
||||
imports: [TokenModule, BaseModule],
|
||||
providers: [WsGateway, WsService, WsTreeService],
|
||||
exports: [WsGateway, WsService, WsTreeService],
|
||||
})
|
||||
|
||||
@@ -16,3 +16,14 @@ export const TREE_EVENTS = new Set([
|
||||
'deleteTreeNode',
|
||||
'refetchRootTreeNodeEvent',
|
||||
]);
|
||||
|
||||
export function getBaseRoomName(baseId: string): string {
|
||||
return `base-${baseId}`;
|
||||
}
|
||||
|
||||
export const BASE_INBOUND_EVENTS = new Set([
|
||||
'base:subscribe',
|
||||
'base:unsubscribe',
|
||||
'base:presence',
|
||||
'base:presence:leave',
|
||||
]);
|
||||
|
||||
Reference in New Issue
Block a user