This commit is contained in:
Philipinho
2026-04-18 13:13:53 +01:00
parent 081bb67239
commit f5b19316af
53 changed files with 4057 additions and 813 deletions
+25 -2
View File
@@ -1,4 +1,5 @@
import { Module } from '@nestjs/common';
import { BullModule } from '@nestjs/bullmq';
import { BaseController } from './controllers/base.controller';
import { BasePropertyController } from './controllers/base-property.controller';
import { BaseRowController } from './controllers/base-row.controller';
@@ -7,15 +8,37 @@ import { BaseService } from './services/base.service';
import { BasePropertyService } from './services/base-property.service';
import { BaseRowService } from './services/base-row.service';
import { BaseViewService } from './services/base-view.service';
import { BaseQueueProcessor } from './processors/base-queue.processor';
import { BaseWsService } from './realtime/base-ws.service';
import { BaseWsConsumers } from './realtime/base-ws-consumers';
import { BasePresenceService } from './realtime/base-presence.service';
import { QueueName } from '../../integrations/queue/constants';
@Module({
imports: [BullModule.registerQueue({ name: QueueName.BASE_QUEUE })],
controllers: [
BaseController,
BasePropertyController,
BaseRowController,
BaseViewController,
],
providers: [BaseService, BasePropertyService, BaseRowService, BaseViewService],
exports: [BaseService, BasePropertyService, BaseRowService, BaseViewService],
providers: [
BaseService,
BasePropertyService,
BaseRowService,
BaseViewService,
BaseQueueProcessor,
BasePresenceService,
BaseWsService,
BaseWsConsumers,
],
exports: [
BaseService,
BasePropertyService,
BaseRowService,
BaseViewService,
BaseWsService,
BasePresenceService,
],
})
export class BaseModule {}
+122 -23
View File
@@ -33,7 +33,7 @@ export type BasePropertyTypeValue =
export const BASE_PROPERTY_TYPES = Object.values(BasePropertyType);
export const choiceSchema = z.object({
id: z.string().uuid(),
id: z.uuid(),
name: z.string().min(1),
color: z.string(),
category: z.enum(['todo', 'inProgress', 'complete']).optional(),
@@ -42,10 +42,10 @@ export const choiceSchema = z.object({
export const selectTypeOptionsSchema = z
.object({
choices: z.array(choiceSchema).default([]),
choiceOrder: z.array(z.string().uuid()).default([]),
choiceOrder: z.array(z.uuid()).default([]),
disableColors: z.boolean().optional(),
defaultValue: z
.union([z.string().uuid(), z.array(z.string().uuid())])
.union([z.uuid(), z.array(z.uuid())])
.nullable()
.optional(),
})
@@ -147,21 +147,21 @@ export function parseTypeOptions(
const cellValueSchemaMap: Partial<Record<BasePropertyTypeValue, z.ZodType>> = {
[BasePropertyType.TEXT]: z.string(),
[BasePropertyType.NUMBER]: z.number(),
[BasePropertyType.SELECT]: z.string().uuid(),
[BasePropertyType.STATUS]: z.string().uuid(),
[BasePropertyType.MULTI_SELECT]: z.array(z.string().uuid()),
[BasePropertyType.SELECT]: z.uuid(),
[BasePropertyType.STATUS]: z.uuid(),
[BasePropertyType.MULTI_SELECT]: z.array(z.uuid()),
[BasePropertyType.DATE]: z.string(),
[BasePropertyType.PERSON]: z.union([z.string().uuid(), z.array(z.string().uuid())]),
[BasePropertyType.PERSON]: z.union([z.uuid(), z.array(z.uuid())]),
[BasePropertyType.FILE]: z.array(z.object({
id: z.string().uuid(),
id: z.uuid(),
fileName: z.string(),
mimeType: z.string().optional(),
fileSize: z.number().optional(),
filePath: z.string().optional(),
})),
[BasePropertyType.CHECKBOX]: z.boolean(),
[BasePropertyType.URL]: z.string().url(),
[BasePropertyType.EMAIL]: z.string().email(),
[BasePropertyType.URL]: z.url(),
[BasePropertyType.EMAIL]: z.email(),
};
export function getCellValueSchema(
@@ -181,15 +181,83 @@ export function validateCellValue(
return schema.safeParse(value);
}
/*
 * Resolution context for conversions where the source type stores IDs
 * (select / multiSelect: choice uuid; person: user uuid; file: attachment
 * uuid). Callers must always supply this — the only invoker is the
 * `BASE_TYPE_CONVERSION` BullMQ worker, which builds the context per
 * chunk of rows (see `tasks/base-type-conversion.task.ts`).
 */
export type CellConversionContext = {
  // Source property's typeOptions; select/status/multiSelect conversions
  // read the `choices` array out of this to map choice uuid → name.
  fromTypeOptions?: unknown;
  // userId → display name, used for person → text conversions.
  userNames?: Map<string, string>;
  // attachmentId → file name, used for file → text conversions when the
  // cell value lacks an inline `fileName`.
  attachmentNames?: Map<string, string>;
};
/*
 * Look up a choice's display name inside a select-type property's
 * typeOptions. Returns undefined when the options are malformed, the id
 * is unknown, or the matched choice has a non-string name.
 */
function resolveChoiceName(
  typeOptions: unknown,
  id: unknown,
): string | undefined {
  if (typeof typeOptions !== 'object' || typeOptions === null) {
    return undefined;
  }
  const choices = (typeOptions as { choices?: unknown }).choices;
  if (!Array.isArray(choices)) {
    return undefined;
  }
  const wanted = String(id);
  for (const choice of choices) {
    if ((choice as any)?.id === wanted) {
      const name = (choice as any)?.name;
      return typeof name === 'string' ? name : undefined;
    }
  }
  return undefined;
}
export function attemptCellConversion(
fromType: BasePropertyTypeValue,
toType: BasePropertyTypeValue,
value: unknown,
ctx: CellConversionContext,
): { converted: boolean; value: unknown } {
if (value === null || value === undefined) {
return { converted: true, value: null };
}
// Resolve IDs to display strings before any direct parse. `select → text`
// and `multiSelect → text` would otherwise short-circuit on z.string()
// parsing the UUID itself and return the raw UUID instead of the name.
if (toType === BasePropertyType.TEXT) {
if (
fromType === BasePropertyType.SELECT ||
fromType === BasePropertyType.STATUS
) {
const name = resolveChoiceName(ctx.fromTypeOptions, value);
return { converted: true, value: name ?? '' };
}
if (fromType === BasePropertyType.MULTI_SELECT && Array.isArray(value)) {
const parts = value
.map((v) => resolveChoiceName(ctx.fromTypeOptions, v))
.filter((v): v is string => typeof v === 'string' && v.length > 0);
return { converted: true, value: parts.join(', ') };
}
if (fromType === BasePropertyType.PERSON && ctx.userNames) {
const ids = Array.isArray(value) ? value : [value];
const parts = ids
.map((v) => ctx.userNames!.get(String(v)))
.filter((v): v is string => typeof v === 'string' && v.length > 0);
return { converted: true, value: parts.join(', ') };
}
if (fromType === BasePropertyType.FILE && Array.isArray(value)) {
const parts = value
.map((f: any) => {
if (f && typeof f === 'object') {
if (typeof f.fileName === 'string') return f.fileName;
if (typeof f.id === 'string' && ctx.attachmentNames) {
return ctx.attachmentNames.get(f.id);
}
}
if (typeof f === 'string' && ctx.attachmentNames) {
return ctx.attachmentNames.get(f);
}
return undefined;
})
.filter((v): v is string => typeof v === 'string' && v.length > 0);
return { converted: true, value: parts.join(', ') };
}
}
const targetSchema = cellValueSchemaMap[toType];
if (!targetSchema) {
return { converted: false, value: null };
@@ -247,35 +315,66 @@ export function attemptCellConversion(
}
export const viewSortSchema = z.object({
propertyId: z.string().uuid(),
propertyId: z.uuid(),
direction: z.enum(['asc', 'desc']),
});
export const viewFilterSchema = z.object({
propertyId: z.string().uuid(),
operator: z.enum([
'equals',
'notEquals',
/*
* View-stored filter shape matches the engine's predicate tree (see
* `core/base/engine/schema.zod.ts`). No legacy flat-array / operator-name
* variants are accepted — stored view configs use `op` (eq / neq / gt /
* lt / contains / ncontains / ...) and nested and/or groups.
*/
const viewFilterConditionSchema = z.object({
propertyId: z.uuid(),
op: z.enum([
'eq',
'neq',
'gt',
'gte',
'lt',
'lte',
'contains',
'notContains',
'ncontains',
'startsWith',
'endsWith',
'isEmpty',
'isNotEmpty',
'greaterThan',
'lessThan',
'before',
'after',
'onOrBefore',
'onOrAfter',
'any',
'none',
'all',
]),
value: z.unknown().optional(),
});
type ViewFilterCondition = z.infer<typeof viewFilterConditionSchema>;
type ViewFilterGroup = {
op: 'and' | 'or';
children: Array<ViewFilterCondition | ViewFilterGroup>;
};
const viewFilterNodeSchema: z.ZodType<ViewFilterCondition | ViewFilterGroup> =
z.lazy(() => z.union([viewFilterConditionSchema, viewFilterGroupSchema]));
const viewFilterGroupSchema: z.ZodType<ViewFilterGroup> = z.lazy(() =>
z.object({
op: z.enum(['and', 'or']),
children: z.array(viewFilterNodeSchema),
}),
);
export const viewConfigSchema = z
.object({
sorts: z.array(viewSortSchema).optional(),
filters: z.array(viewFilterSchema).optional(),
visiblePropertyIds: z.array(z.string().uuid()).optional(),
hiddenPropertyIds: z.array(z.string().uuid()).optional(),
filter: viewFilterGroupSchema.optional(),
visiblePropertyIds: z.array(z.uuid()).optional(),
hiddenPropertyIds: z.array(z.uuid()).optional(),
propertyWidths: z.record(z.string(), z.number().positive()).optional(),
propertyOrder: z.array(z.string().uuid()).optional(),
propertyOrder: z.array(z.uuid()).optional(),
})
.passthrough();
@@ -52,12 +52,16 @@ export class BasePropertyController {
throw new ForbiddenException();
}
return this.basePropertyService.create(workspace.id, dto);
return this.basePropertyService.create(workspace.id, dto, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('update')
async update(@Body() dto: UpdatePropertyDto, @AuthUser() user: User) {
async update(
@Body() dto: UpdatePropertyDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -68,12 +72,16 @@ export class BasePropertyController {
throw new ForbiddenException();
}
return this.basePropertyService.update(dto);
return this.basePropertyService.update(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('delete')
async delete(@Body() dto: DeletePropertyDto, @AuthUser() user: User) {
async delete(
@Body() dto: DeletePropertyDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -84,12 +92,16 @@ export class BasePropertyController {
throw new ForbiddenException();
}
await this.basePropertyService.delete(dto);
await this.basePropertyService.delete(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('reorder')
async reorder(@Body() dto: ReorderPropertyDto, @AuthUser() user: User) {
async reorder(
@Body() dto: ReorderPropertyDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -100,6 +112,6 @@ export class BasePropertyController {
throw new ForbiddenException();
}
await this.basePropertyService.reorder(dto);
await this.basePropertyService.reorder(dto, workspace.id, user.id);
}
}
@@ -60,58 +60,10 @@ export class BaseRowController {
@HttpCode(HttpStatus.OK)
@Post('info')
async getRow(@Body() dto: RowIdDto, @AuthUser() user: User) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
return this.baseRowService.getRowInfo(dto.rowId, dto.baseId);
}
@HttpCode(HttpStatus.OK)
@Post('update')
async update(@Body() dto: UpdateRowDto, @AuthUser() user: User) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
return this.baseRowService.update(dto, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('delete')
async delete(@Body() dto: DeleteRowDto, @AuthUser() user: User) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
await this.baseRowService.delete(dto.rowId, dto.baseId);
}
@HttpCode(HttpStatus.OK)
@Post('list')
async list(
@Body() dto: ListRowsDto,
@Body() pagination: PaginationOptions,
async getRow(
@Body() dto: RowIdDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
@@ -123,12 +75,16 @@ export class BaseRowController {
throw new ForbiddenException();
}
return this.baseRowService.list(dto, pagination);
return this.baseRowService.getRowInfo(dto.rowId, dto.baseId, workspace.id);
}
@HttpCode(HttpStatus.OK)
@Post('reorder')
async reorder(@Body() dto: ReorderRowDto, @AuthUser() user: User) {
@Post('update')
async update(
@Body() dto: UpdateRowDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -139,6 +95,67 @@ export class BaseRowController {
throw new ForbiddenException();
}
await this.baseRowService.reorder(dto);
return this.baseRowService.update(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('delete')
async delete(
@Body() dto: DeleteRowDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
await this.baseRowService.delete(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('list')
async list(
@Body() dto: ListRowsDto,
@Body() pagination: PaginationOptions,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
return this.baseRowService.list(dto, pagination, workspace.id);
}
@HttpCode(HttpStatus.OK)
@Post('reorder')
async reorder(
@Body() dto: ReorderRowDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
await this.baseRowService.reorder(dto, workspace.id, user.id);
}
}
@@ -54,7 +54,11 @@ export class BaseViewController {
@HttpCode(HttpStatus.OK)
@Post('update')
async update(@Body() dto: UpdateViewDto, @AuthUser() user: User) {
async update(
@Body() dto: UpdateViewDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -65,12 +69,16 @@ export class BaseViewController {
throw new ForbiddenException();
}
return this.baseViewService.update(dto);
return this.baseViewService.update(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('delete')
async delete(@Body() dto: DeleteViewDto, @AuthUser() user: User) {
async delete(
@Body() dto: DeleteViewDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -81,12 +89,16 @@ export class BaseViewController {
throw new ForbiddenException();
}
await this.baseViewService.delete(dto);
await this.baseViewService.delete(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('list')
async list(@Body() dto: BaseIdDto, @AuthUser() user: User) {
async list(
@Body() dto: BaseIdDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
@@ -97,6 +109,6 @@ export class BaseViewController {
throw new ForbiddenException();
}
return this.baseViewService.listByBaseId(dto.baseId);
return this.baseViewService.listByBaseId(dto.baseId, workspace.id);
}
}
@@ -9,6 +9,12 @@ export class CreateRowDto {
cells?: Record<string, unknown>;
@IsOptional()
@IsString()
@IsUUID()
afterRowId?: string;
// Echoed back in the socket event so the originating client can skip
// replaying its own write.
@IsOptional()
@IsString()
requestId?: string;
}
@@ -27,6 +27,10 @@ export class UpdatePropertyDto {
@IsOptional()
@IsObject()
typeOptions?: Record<string, unknown>;
@IsOptional()
@IsString()
requestId?: string;
}
export class DeletePropertyDto {
@@ -35,6 +39,10 @@ export class DeletePropertyDto {
@IsUUID()
baseId: string;
@IsOptional()
@IsString()
requestId?: string;
}
export class ReorderPropertyDto {
@@ -47,4 +55,8 @@ export class ReorderPropertyDto {
@IsString()
@IsNotEmpty()
position: string;
@IsOptional()
@IsString()
requestId?: string;
}
+35 -20
View File
@@ -1,5 +1,16 @@
import { IsNotEmpty, IsObject, IsOptional, IsString, IsUUID, IsArray, ValidateNested } from 'class-validator';
import {
IsIn,
IsNotEmpty,
IsObject,
IsOptional,
IsString,
IsUUID,
IsArray,
ValidateNested,
} from 'class-validator';
import { Type } from 'class-transformer';
// `filter` / `search` shapes are validated by the engine's Zod schemas
// at the service boundary (`core/base/engine/schema.zod.ts`).
export class UpdateRowDto {
@IsUUID()
@@ -10,6 +21,10 @@ export class UpdateRowDto {
@IsObject()
cells: Record<string, unknown>;
@IsOptional()
@IsString()
requestId?: string;
}
export class DeleteRowDto {
@@ -18,6 +33,10 @@ export class DeleteRowDto {
@IsUUID()
baseId: string;
@IsOptional()
@IsString()
requestId?: string;
}
export class RowIdDto {
@@ -28,25 +47,12 @@ export class RowIdDto {
baseId: string;
}
class FilterDto {
@IsUUID()
propertyId: string;
@IsString()
@IsNotEmpty()
operator: string;
@IsOptional()
value?: unknown;
}
class SortDto {
@IsUUID()
propertyId: string;
@IsString()
@IsNotEmpty()
direction: string;
@IsIn(['asc', 'desc'])
direction: 'asc' | 'desc';
}
export class ListRowsDto {
@@ -57,17 +63,22 @@ export class ListRowsDto {
@IsUUID()
viewId?: string;
// Compound filter tree. Shape validated by the engine's Zod schema at
// the service boundary.
@IsOptional()
@IsArray()
@ValidateNested({ each: true })
@Type(() => FilterDto)
filters?: FilterDto[];
@IsObject()
filter?: unknown;
@IsOptional()
@IsArray()
@ValidateNested({ each: true })
@Type(() => SortDto)
sorts?: SortDto[];
// `{ query, mode? }` — Zod-validated at the service boundary.
@IsOptional()
@IsObject()
search?: unknown;
}
export class ReorderRowDto {
@@ -80,4 +91,8 @@ export class ReorderRowDto {
@IsString()
@IsNotEmpty()
position: string;
@IsOptional()
@IsString()
requestId?: string;
}
+111
View File
@@ -0,0 +1,111 @@
import { BadRequestException } from '@nestjs/common';
import { SortBuild, TailKey } from './sort';
type ValueType = 'numeric' | 'date' | 'bool' | 'text';

// Hard cap on decoded cursor size so a tampered cursor can't force a large
// JSON parse. Real cursors are <1KB (a handful of field values).
const MAX_CURSOR_DECODED_BYTES = 4096;

/*
 * Null-safe cursor encoder. The previous encoder used a literal string
 * sentinel `__null__` for NULLs, which could collide with real cell
 * values. This encoder never sees NULL because sort expressions are
 * sentinel-wrapped (see sort.ts). It also represents ±Infinity
 * explicitly so JSON round-tripping is lossless.
 *
 * Returns an encode/decode/parse triple bound to the per-field value
 * types of the supplied sorts; tail keys (position/id) are always text.
 */
export function makeCursor(sorts: SortBuild[], tailKeys: TailKey[]) {
  const types = new Map<string, ValueType>();
  for (const s of sorts) types.set(s.key, s.valueType);
  for (const k of tailKeys) types.set(k, 'text');

  return {
    // Serialise [key, value] pairs to a base64url JSON object of strings.
    encodeCursor(values: Array<[string, unknown]>): string {
      const payload: Record<string, string> = {};
      for (const [k, v] of values) {
        payload[k] = encodeValue(v, types.get(k) ?? 'text');
      }
      return Buffer.from(JSON.stringify(payload), 'utf8').toString('base64url');
    },

    // Decode and validate a client-supplied cursor. Throws 400 on any
    // malformed input; never trusts the payload shape.
    decodeCursor(
      cursor: string,
      fieldNames: string[],
    ): Record<string, string> {
      const buf = Buffer.from(cursor, 'base64url');
      // Enforce the declared size cap (the constant above was previously
      // defined but never actually checked).
      if (buf.byteLength > MAX_CURSOR_DECODED_BYTES) {
        throw new BadRequestException('Invalid cursor');
      }
      let parsed: unknown;
      try {
        parsed = JSON.parse(buf.toString('utf8'));
      } catch {
        throw new BadRequestException('Invalid cursor');
      }
      if (typeof parsed !== 'object' || parsed === null) {
        throw new BadRequestException('Invalid cursor payload');
      }
      const record = parsed as Record<string, unknown>;
      const out: Record<string, string> = {};
      for (const name of fieldNames) {
        const raw = record[name];
        // Reject both absent fields and non-string values a tampered
        // cursor could smuggle in (the old code only checked presence).
        if (typeof raw !== 'string') {
          throw new BadRequestException(`Cursor missing field: ${name}`);
        }
        out[name] = raw;
      }
      return out;
    },

    // Convert decoded string fields back to typed values for SQL binding.
    parseCursor(decoded: Record<string, string>): Record<string, unknown> {
      const out: Record<string, unknown> = {};
      for (const [k, raw] of Object.entries(decoded)) {
        out[k] = decodeValue(raw, types.get(k) ?? 'text');
      }
      return out;
    },
  };
}
/*
 * Render one cursor field value as a string for JSON transport.
 * NULL/undefined and NaN collapse to '' ; ±Infinity becomes 'inf'/'-inf'
 * (numeric) or maps Postgres 'infinity'/'-infinity' (date) losslessly.
 */
function encodeValue(value: unknown, type: ValueType): string {
  switch (type) {
    case 'numeric': {
      if (value == null) return '';
      const asString = String(value);
      const parsed = typeof value === 'number' ? value : parseFloat(asString);
      if (parsed === Number.POSITIVE_INFINITY || asString === 'Infinity') {
        return 'inf';
      }
      if (parsed === Number.NEGATIVE_INFINITY || asString === '-Infinity') {
        return '-inf';
      }
      return Number.isNaN(parsed) ? '' : String(parsed);
    }
    case 'date': {
      if (value == null) return '';
      if (value instanceof Date) return value.toISOString();
      const asString = String(value);
      if (asString === 'infinity') return 'inf';
      if (asString === '-infinity') return '-inf';
      return asString;
    }
    case 'bool':
      // Note: null/undefined are falsy here and encode as '0', not ''.
      return value ? '1' : '0';
    default:
      return value == null ? '' : String(value);
  }
}
/*
 * Inverse of encodeValue: turn a transported string back into the typed
 * value used for SQL binding. '' means NULL for numeric/date; dates keep
 * Postgres 'infinity'/'-infinity' string forms.
 */
function decodeValue(raw: string, type: ValueType): unknown {
  switch (type) {
    case 'numeric':
      if (raw === 'inf') return Number.POSITIVE_INFINITY;
      if (raw === '-inf') return Number.NEGATIVE_INFINITY;
      return raw === '' ? null : parseFloat(raw);
    case 'date':
      if (raw === 'inf') return 'infinity';
      if (raw === '-inf') return '-infinity';
      return raw === '' ? null : raw;
    case 'bool':
      return raw === '1';
    default:
      return raw;
  }
}
@@ -0,0 +1,86 @@
import { SelectQueryBuilder } from 'kysely';
import { DB } from '@docmost/db/types/db';
import { BaseRow } from '@docmost/db/types/entity.types';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import {
CursorPaginationResult,
executeWithCursorPagination,
} from '@docmost/db/pagination/cursor-pagination';
import { FilterNode, SearchSpec, SortSpec } from './schema.zod';
import { buildWhere, PropertySchema } from './predicate';
import { buildSorts, CURSOR_TAIL_KEYS, SortBuild } from './sort';
import { buildSearch } from './search';
import { makeCursor } from './cursor';
export type EngineListOpts = {
  // Optional predicate tree (and/or groups + leaf conditions).
  filter?: FilterNode;
  // Optional user sorts; position/id tiebreakers are appended regardless.
  sorts?: SortSpec[];
  // Optional free-text search spec.
  search?: SearchSpec;
  // propertyId → property slice, used to compile filters and sorts.
  schema: PropertySchema;
  pagination: PaginationOptions;
};

/*
 * Top-level orchestrator. Callers (repos, services) provide a base
 * Kysely query already scoped to the target base + workspace + alive
 * rows; this adds search/filter/sort clauses and runs cursor pagination.
 *
 * Order of operations: search → filter → sort-expression selects →
 * cursor-paginated execution with `position` and `id` as stable
 * tiebreakers after any user sorts.
 */
export async function runListQuery(
  base: SelectQueryBuilder<DB, 'baseRows', any>,
  opts: EngineListOpts,
): Promise<CursorPaginationResult<BaseRow>> {
  let qb = base;
  // Rebind to a local const inside each closure so TS keeps the
  // narrowed (non-undefined) type across the callback boundary.
  if (opts.search) {
    const spec = opts.search;
    qb = qb.where((eb) => buildSearch(eb, spec));
  }
  if (opts.filter) {
    const filter = opts.filter;
    qb = qb.where((eb) => buildWhere(eb, filter, opts.schema));
  }
  const sortBuilds: SortBuild[] =
    opts.sorts && opts.sorts.length > 0
      ? buildSorts(opts.sorts, opts.schema)
      : [];
  // Each sort expression must also be selected so the cursor encoder can
  // read its value off the returned rows. The cast works around Kysely's
  // select() widening the builder's output type.
  for (const sb of sortBuilds) {
    qb = qb.select(sb.expression.as(sb.key)) as SelectQueryBuilder<
      DB,
      'baseRows',
      any
    >;
  }
  const cursor = makeCursor(sortBuilds, CURSOR_TAIL_KEYS);
  // User sorts first, then position/id ascending as deterministic
  // tiebreakers (also the whole ordering when there are no user sorts).
  const fields = [
    ...sortBuilds.map((sb) => ({
      expression: sb.expression,
      direction: sb.direction,
      key: sb.key,
    })),
    {
      expression: 'position' as const,
      direction: 'asc' as const,
      key: 'position' as const,
    },
    {
      expression: 'id' as const,
      direction: 'asc' as const,
      key: 'id' as const,
    },
  ];
  // NOTE(review): the `as any` / `as unknown as` casts bridge the pagination
  // helper's generic signature; the runtime shapes are what the helper
  // expects but this is not compiler-verified.
  return executeWithCursorPagination(qb as any, {
    perPage: opts.pagination.limit,
    cursor: opts.pagination.cursor,
    beforeCursor: opts.pagination.beforeCursor,
    fields: fields as any,
    encodeCursor: cursor.encodeCursor as any,
    decodeCursor: cursor.decodeCursor as any,
    parseCursor: cursor.parseCursor as any,
  }) as unknown as Promise<CursorPaginationResult<BaseRow>>;
}
@@ -0,0 +1,32 @@
import { sql, RawBuilder } from 'kysely';
/*
 * Parameterised extractors wrapping the SQL helper functions installed
 * by the bases-hardening migration. PropertyId always binds as a
 * parameter — never string-interpolated. These replace every
 * `sql.raw('cells->>...')` site in the old repo.
 */

// Text value of a cell. The base_cell_* SQL functions are defined by the
// migration (not visible here); return semantics for absent cells are
// assumed NULL — confirm against the migration.
export function textCell(propertyId: string): RawBuilder<string> {
  return sql<string>`base_cell_text(cells, ${propertyId}::uuid)`;
}

// Numeric value of a cell.
export function numericCell(propertyId: string): RawBuilder<number> {
  return sql<number>`base_cell_numeric(cells, ${propertyId}::uuid)`;
}

// Timestamptz value of a cell.
export function dateCell(propertyId: string): RawBuilder<Date> {
  return sql<Date>`base_cell_timestamptz(cells, ${propertyId}::uuid)`;
}

// Boolean value of a cell.
export function boolCell(propertyId: string): RawBuilder<boolean> {
  return sql<boolean>`base_cell_bool(cells, ${propertyId}::uuid)`;
}

// Array-typed cell (multi-select / file / multi-person id lists).
export function arrayCell(propertyId: string): RawBuilder<unknown> {
  return sql<unknown>`base_cell_array(cells, ${propertyId}::uuid)`;
}
/*
 * Escape a user-supplied string for use inside a LIKE/ILIKE pattern:
 * prefixes the wildcard metacharacters % and _ and the escape character
 * itself with a backslash. Callers add their own surrounding wildcards.
 */
export function escapeIlike(value: string): string {
  let escaped = '';
  for (const ch of value) {
    escaped += ch === '%' || ch === '_' || ch === '\\' ? `\\${ch}` : ch;
  }
  return escaped;
}
+44
View File
@@ -0,0 +1,44 @@
// Barrel for the base list-query engine: Zod schema validation, property
// kind classification, predicate/sort/search builders, the cursor codec,
// and the top-level list orchestrator.
export {
  MAX_FILTER_DEPTH,
  MAX_FILTER_NODES,
  MAX_SORTS,
  conditionSchema,
  filterGroupSchema,
  filterNodeSchema,
  listQuerySchema,
  operatorSchema,
  searchSchema,
  sortSpecSchema,
  sortsSchema,
  validateFilterTree,
} from './schema.zod';
export type {
  Condition,
  FilterGroup,
  FilterNode,
  ListQuery,
  Operator,
  SearchSpec,
  SortSpec,
} from './schema.zod';
export {
  PropertyKind,
  SYSTEM_COLUMN,
  isSystemType,
  propertyKind,
} from './kinds';
export type { PropertyKindValue } from './kinds';
export { buildWhere } from './predicate';
export type { PropertySchema } from './predicate';
export { buildSorts, CURSOR_TAIL_KEYS } from './sort';
export type { SortBuild, TailKey } from './sort';
export { makeCursor } from './cursor';
export { buildSearch } from './search';
export { runListQuery } from './engine';
export type { EngineListOpts } from './engine';
+57
View File
@@ -0,0 +1,57 @@
import { BasePropertyType } from '../base.schemas';
// Coarse query-engine classification of property types. Several property
// types collapse into one kind (e.g. text/url/email → TEXT; select/status
// → SELECT, see propertyKind below) because they share storage shape and
// SQL handling.
export const PropertyKind = {
  TEXT: 'text',
  NUMERIC: 'numeric',
  DATE: 'date',
  BOOL: 'bool',
  SELECT: 'select',
  MULTI: 'multi',
  PERSON: 'person',
  FILE: 'file',
  // System-maintained user column (lastUpdatedById) rather than a cell.
  SYS_USER: 'sys_user',
} as const;

// Union of the literal kind strings above.
export type PropertyKindValue = (typeof PropertyKind)[keyof typeof PropertyKind];
// Property type → query-engine kind. Hoisted into a Map so the lookup is
// allocation-free per call and cannot collide with Object.prototype keys.
const KIND_BY_TYPE = new Map<string, PropertyKindValue>([
  [BasePropertyType.TEXT, PropertyKind.TEXT],
  [BasePropertyType.URL, PropertyKind.TEXT],
  [BasePropertyType.EMAIL, PropertyKind.TEXT],
  [BasePropertyType.NUMBER, PropertyKind.NUMERIC],
  [BasePropertyType.DATE, PropertyKind.DATE],
  [BasePropertyType.CREATED_AT, PropertyKind.DATE],
  [BasePropertyType.LAST_EDITED_AT, PropertyKind.DATE],
  [BasePropertyType.CHECKBOX, PropertyKind.BOOL],
  [BasePropertyType.SELECT, PropertyKind.SELECT],
  [BasePropertyType.STATUS, PropertyKind.SELECT],
  [BasePropertyType.MULTI_SELECT, PropertyKind.MULTI],
  [BasePropertyType.PERSON, PropertyKind.PERSON],
  [BasePropertyType.FILE, PropertyKind.FILE],
  [BasePropertyType.LAST_EDITED_BY, PropertyKind.SYS_USER],
]);

/*
 * Classify a base property type into its query-engine kind, or null for
 * types the engine does not filter/sort on.
 */
export function propertyKind(type: string): PropertyKindValue | null {
  return KIND_BY_TYPE.get(type) ?? null;
}
// System property type → camelCase column name on `base_rows`.
// Kysely camel-case plugin maps to snake_case in SQL.
export const SYSTEM_COLUMN: Record<string, 'createdAt' | 'updatedAt' | 'lastUpdatedById'> = {
  [BasePropertyType.CREATED_AT]: 'createdAt',
  [BasePropertyType.LAST_EDITED_AT]: 'updatedAt',
  [BasePropertyType.LAST_EDITED_BY]: 'lastUpdatedById',
};

/*
 * True when the given property type is backed by a real base_rows column
 * rather than a `cells` entry. Uses an own-property check: the previous
 * `type in SYSTEM_COLUMN` also matched inherited Object.prototype keys
 * such as 'toString' or 'constructor' for a corrupt/hostile type string.
 */
export function isSystemType(type: string): boolean {
  return Object.prototype.hasOwnProperty.call(SYSTEM_COLUMN, type);
}
@@ -0,0 +1,404 @@
import { Expression, ExpressionBuilder, sql, SqlBool } from 'kysely';
import { DB } from '@docmost/db/types/db';
import { BaseProperty } from '@docmost/db/types/entity.types';
import { Condition, FilterNode } from './schema.zod';
import { PropertyKind, propertyKind, SYSTEM_COLUMN } from './kinds';
import {
arrayCell,
boolCell,
dateCell,
escapeIlike,
numericCell,
textCell,
} from './extractors';
// Minimal slice of BaseProperty the predicate builder needs, keyed by
// property id.
export type PropertySchema = Map<
  string,
  Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>
>;

// Shorthand for the expression builder scoped to the baseRows table.
type Eb = ExpressionBuilder<DB, 'baseRows'>;

// Constant boolean SQL expressions; unknown/invalid conditions compile to
// FALSE so a stale filter can never widen a result set.
const TRUE = sql<SqlBool>`TRUE`;
const FALSE = sql<SqlBool>`FALSE`;
/*
 * Recursively compile a filter tree into a Kysely boolean expression.
 * Leaves (conditions) compile individually; groups combine their
 * children with AND/OR, and an empty group collapses to TRUE.
 */
export function buildWhere(
  eb: Eb,
  node: FilterNode,
  schema: PropertySchema,
): Expression<SqlBool> {
  if (!('children' in node)) {
    return buildCondition(eb, node, schema);
  }
  if (node.children.length === 0) {
    return TRUE;
  }
  const compiled = node.children.map((child) => buildWhere(eb, child, schema));
  return node.op === 'and' ? eb.and(compiled) : eb.or(compiled);
}
/*
 * Compile a single leaf condition against the property schema. Unknown
 * property ids and unmapped property types compile to FALSE, so stale or
 * malformed filters can only narrow results, never widen them.
 */
function buildCondition(
  eb: Eb,
  cond: Condition,
  schema: PropertySchema,
): Expression<SqlBool> {
  const prop = schema.get(cond.propertyId);
  if (!prop) return FALSE;
  // Own-key lookup: plain `SYSTEM_COLUMN[prop.type]` would also pick up
  // inherited Object.prototype members (e.g. 'constructor') for a
  // corrupt/hostile type string.
  const sysCol = Object.prototype.hasOwnProperty.call(SYSTEM_COLUMN, prop.type)
    ? SYSTEM_COLUMN[prop.type]
    : undefined;
  if (sysCol) return systemCondition(eb, sysCol, prop.type, cond);
  const kind = propertyKind(prop.type);
  if (!kind) return FALSE;
  switch (kind) {
    case PropertyKind.TEXT:
      return textCondition(eb, cond);
    case PropertyKind.NUMERIC:
      return numericCondition(eb, cond);
    case PropertyKind.DATE:
      return dateCondition(eb, cond);
    case PropertyKind.BOOL:
      return boolCondition(eb, cond);
    case PropertyKind.SELECT:
      return selectCondition(eb, cond);
    case PropertyKind.MULTI:
      return multiCondition(eb, cond);
    case PropertyKind.PERSON:
      return personCondition(eb, cond, prop);
    case PropertyKind.FILE:
      return arrayOfIdsCondition(eb, cond);
    default:
      return FALSE;
  }
}
// --- per-kind handlers ------------------------------------------------
// Text-kind cells (text / url / email). NULL and '' both count as
// "empty". Negative operators (neq / ncontains) deliberately OR in the
// IS NULL case so SQL three-valued logic doesn't silently drop NULL rows.
function textCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      // A missing comparison value can never match anything.
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'contains':
      // User value is escaped; the surrounding %…% wildcards are ours.
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `%${escapeIlike(String(val))}%`);
    case 'ncontains':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, 'not ilike', `%${escapeIlike(String(val))}%`),
          ]);
    case 'startsWith':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `${escapeIlike(String(val))}%`);
    case 'endsWith':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `%${escapeIlike(String(val))}`);
    default:
      // Operators that don't apply to text (gt, before, any, …).
      return FALSE;
  }
}
// Numeric cells. The filter value is coerced with Number(); null or NaN
// input makes the comparison unsatisfiable (FALSE) rather than erroring.
// `neq` ORs in IS NULL so rows with no value count as "not equal".
function numericCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = numericCell(cond.propertyId);
  const raw = cond.value;
  const num = raw == null ? null : Number(raw);
  const bad = num == null || Number.isNaN(num);
  switch (cond.op) {
    case 'isEmpty':
      return eb(expr as any, 'is', null);
    case 'isNotEmpty':
      return eb(expr as any, 'is not', null);
    case 'eq':
      return bad ? FALSE : eb(expr as any, '=', num);
    case 'neq':
      return bad
        ? FALSE
        : eb.or([eb(expr as any, 'is', null), eb(expr as any, '!=', num)]);
    case 'gt':
      return bad ? FALSE : eb(expr as any, '>', num);
    case 'gte':
      return bad ? FALSE : eb(expr as any, '>=', num);
    case 'lt':
      return bad ? FALSE : eb(expr as any, '<', num);
    case 'lte':
      return bad ? FALSE : eb(expr as any, '<=', num);
    default:
      return FALSE;
  }
}
// Date cells. The filter value is passed through as a string and compared
// against base_cell_timestamptz output; Postgres casts the string side.
// null/'' filter values make the comparison unsatisfiable.
function dateCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = dateCell(cond.propertyId);
  const raw = cond.value;
  const bad = raw == null || raw === '';
  switch (cond.op) {
    case 'isEmpty':
      return eb(expr as any, 'is', null);
    case 'isNotEmpty':
      return eb(expr as any, 'is not', null);
    case 'eq':
      return bad ? FALSE : eb(expr as any, '=', String(raw));
    case 'neq':
      // OR IS NULL: rows without a date are "not equal" to any value.
      return bad
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(raw)),
          ]);
    case 'before':
      return bad ? FALSE : eb(expr as any, '<', String(raw));
    case 'after':
      return bad ? FALSE : eb(expr as any, '>', String(raw));
    case 'onOrBefore':
      return bad ? FALSE : eb(expr as any, '<=', String(raw));
    case 'onOrAfter':
      return bad ? FALSE : eb(expr as any, '>=', String(raw));
    default:
      return FALSE;
  }
}
function boolCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Filter predicate for checkbox cells. The comparison value is coerced
  // with Boolean(); note any non-empty string (even 'false') is truthy.
  const expr = boolCell(cond.propertyId);
  const raw = cond.value;

  if (cond.op === 'isEmpty') return eb(expr as any, 'is', null);
  if (cond.op === 'isNotEmpty') return eb(expr as any, 'is not', null);
  if (raw == null) return FALSE;

  const wanted = Boolean(raw);
  if (cond.op === 'eq') return eb(expr as any, '=', wanted);
  if (cond.op === 'neq') {
    // NULL cells count as "not equal".
    return eb.or([
      eb(expr as any, 'is', null),
      eb(expr as any, '!=', wanted),
    ]);
  }
  return FALSE;
}
function selectCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Filter predicate for single-select cells. The cell stores a single
  // option UUID as plain text, so comparisons run on the text extractor.
  // Both SQL NULL and empty string count as "no value".
  const expr = textCell(cond.propertyId);
  const val = cond.value;

  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      // Include NULL cells: SQL `!=` never matches them on its own.
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const options = asStringArray(val);
      return options.length > 0 ? eb(expr as any, 'in', options) : FALSE;
    }
    case 'none': {
      const options = asStringArray(val);
      if (options.length === 0) return TRUE;
      // NULL cells trivially match none of the options.
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, 'not in', options),
      ]);
    }
    default:
      return FALSE;
  }
}
function multiCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Multi-select cells are stored as a jsonb array of option ids; delegate
  // to the shared array-membership predicate builder.
  return arrayOfIdsCondition(eb, cond);
}
function personCondition(
  eb: Eb,
  cond: Condition,
  prop: Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>,
): Expression<SqlBool> {
  // Person cells hold either a single uuid (as text) or an array of uuids,
  // depending on the property's `allowMultiple` option. Multi-valued cells
  // reuse the jsonb-array predicate; single values compare as text, where
  // both SQL NULL and empty string count as "no value".
  if ((prop.typeOptions as any)?.allowMultiple) {
    return arrayOfIdsCondition(eb, cond);
  }

  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      // SQL `!=` never matches NULL cells; include them explicitly.
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const ids = asStringArray(val);
      return ids.length > 0 ? eb(expr as any, 'in', ids) : FALSE;
    }
    default:
      return FALSE;
  }
}
function arrayOfIdsCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Membership predicates over cells stored as a jsonb array of id strings
  // (multi-select options, multi-person, files).
  const expr = arrayCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      // A missing cell and an explicit empty array both count as empty.
      return eb.or([
        eb(expr as any, 'is', null),
        sql<SqlBool>`jsonb_array_length(${expr}) = 0`,
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        sql<SqlBool>`jsonb_array_length(${expr}) > 0`,
      ]);
    case 'any': {
      // jsonb `?|`: matches when any of the given ids is an element.
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return sql<SqlBool>`${expr} ?| ${arr}`;
    }
    case 'all': {
      // jsonb `@>`: containment — every requested id must be present.
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      // `::text::jsonb` because postgres.js auto-detects JSON-shaped strings
      // as jsonb and re-encodes them, producing a jsonb *string* instead of
      // an array. Without the text hop, the containment check never matches.
      return sql<SqlBool>`${expr} @> ${JSON.stringify(arr)}::text::jsonb`;
    }
    case 'none': {
      // NULL cells trivially contain none of the ids.
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      return eb.or([
        eb(expr as any, 'is', null),
        sql<SqlBool>`NOT (${expr} ?| ${arr})`,
      ]);
    }
    default:
      return FALSE;
  }
}
function systemCondition(
  eb: Eb,
  column: 'createdAt' | 'updatedAt' | 'lastUpdatedById',
  propertyType: string,
  cond: Condition,
): Expression<SqlBool> {
  // Predicates over real table columns rather than jsonb cells.
  const ref = eb.ref(column);
  const val = cond.value;

  // lastEditedBy — UUID column; behaves like a select (uuid equality,
  // membership, emptiness). The column can be NULL, hence the isEmpty case.
  if (propertyType === 'lastEditedBy') {
    switch (cond.op) {
      case 'isEmpty':
        return eb(ref, 'is', null);
      case 'isNotEmpty':
        return eb(ref, 'is not', null);
      case 'eq':
        return val == null ? FALSE : eb(ref, '=', String(val));
      case 'neq':
        return val == null
          ? FALSE
          : eb.or([eb(ref, 'is', null), eb(ref, '!=', String(val))]);
      case 'any': {
        const ids = asStringArray(val);
        return ids.length > 0 ? eb(ref, 'in', ids) : FALSE;
      }
      case 'none': {
        const ids = asStringArray(val);
        if (ids.length === 0) return TRUE;
        return eb.or([eb(ref, 'is', null), eb(ref, 'not in', ids)]);
      }
      default:
        return FALSE;
    }
  }

  // createdAt / updatedAt — NOT NULL timestamptz columns, so emptiness
  // checks are constants and `neq` needs no NULL branch.
  if (cond.op === 'isEmpty') return FALSE;
  if (cond.op === 'isNotEmpty') return TRUE;
  if (val == null || val === '') return FALSE;

  const ts = String(val);
  switch (cond.op) {
    case 'eq':
      return eb(ref, '=', ts);
    case 'neq':
      return eb(ref, '!=', ts);
    case 'before':
      return eb(ref, '<', ts);
    case 'after':
      return eb(ref, '>', ts);
    case 'onOrBefore':
      return eb(ref, '<=', ts);
    case 'onOrAfter':
      return eb(ref, '>=', ts);
    default:
      return FALSE;
  }
}
// --- utilities --------------------------------------------------------
function asStringArray(val: unknown): string[] {
  // Normalise an arbitrary filter value into a list of strings:
  // null/undefined → [], arrays keep only their non-null members
  // (stringified), and any other scalar becomes a one-element list.
  if (val == null) return [];
  if (!Array.isArray(val)) return [String(val)];
  const out: string[] = [];
  for (const item of val) {
    if (item != null) out.push(String(item));
  }
  return out;
}
export { TRUE as TRUE_EXPR, FALSE as FALSE_EXPR };
@@ -0,0 +1,100 @@
import { z } from 'zod';
// Hard limits applied to incoming list queries before they reach SQL.
export const MAX_FILTER_DEPTH = 5;
export const MAX_FILTER_NODES = 50;
export const MAX_SORTS = 5;
// Shared uuid validator for property references.
const uuid = z.uuid();
// Every operator any property kind understands. Which subset actually
// applies to a given condition depends on the property's type and is
// decided by the predicate builder, not this schema.
export const operatorSchema = z.enum([
  'eq',
  'neq',
  'gt',
  'gte',
  'lt',
  'lte',
  'contains',
  'ncontains',
  'startsWith',
  'endsWith',
  'isEmpty',
  'isNotEmpty',
  'before',
  'after',
  'onOrBefore',
  'onOrAfter',
  'any',
  'none',
  'all',
]);
export type Operator = z.infer<typeof operatorSchema>;
// A single leaf predicate: one property, one operator, an optional value
// (left as `unknown` here; per-type coercion happens in the predicate builder).
export const conditionSchema = z.object({
  propertyId: uuid,
  op: operatorSchema,
  value: z.unknown().optional(),
});
export type Condition = z.infer<typeof conditionSchema>;
// A filter tree node is either a leaf condition or an and/or group.
export type FilterNode = Condition | FilterGroup;
export type FilterGroup = {
  op: 'and' | 'or';
  children: FilterNode[];
};
// Recursive Zod schemas for grouped filter trees; `z.lazy` breaks the
// mutual recursion between node and group.
export const filterNodeSchema: z.ZodType<FilterNode> = z.lazy(() =>
  z.union([conditionSchema, filterGroupSchema]),
);
export const filterGroupSchema: z.ZodType<FilterGroup> = z.lazy(() =>
  z.object({
    op: z.enum(['and', 'or']),
    children: z.array(filterNodeSchema),
  }),
);
// Count nodes + max depth to prevent pathological trees from reaching SQL.
// Throws on the first violation; returns normally for acceptable trees.
export function validateFilterTree(node: FilterNode): void {
  let seen = 0;
  const visit = (current: FilterNode, depth: number): void => {
    if (depth > MAX_FILTER_DEPTH) {
      throw new Error(`Filter tree exceeds max depth ${MAX_FILTER_DEPTH}`);
    }
    seen += 1;
    if (seen > MAX_FILTER_NODES) {
      throw new Error(`Filter tree exceeds max node count ${MAX_FILTER_NODES}`);
    }
    // Only groups carry children; leaf conditions end the recursion.
    if ('children' in current) {
      current.children.forEach((child) => visit(child, depth + 1));
    }
  };
  visit(node, 0);
}
// A single sort instruction over one property.
export const sortSpecSchema = z.object({
  propertyId: uuid,
  direction: z.enum(['asc', 'desc']),
});
export type SortSpec = z.infer<typeof sortSpecSchema>;
// At most MAX_SORTS sort levels per query.
export const sortsSchema = z.array(sortSpecSchema).max(MAX_SORTS);
// Free-text search over row content; 'trgm' (ILIKE) is the default mode.
export const searchSchema = z.object({
  query: z.string().min(1).max(500),
  mode: z.enum(['trgm', 'fts']).default('trgm'),
});
export type SearchSpec = z.infer<typeof searchSchema>;
// Top-level request DTO shape. The row controller DTO composes this.
export const listQuerySchema = z.object({
  filter: filterGroupSchema.optional(),
  sorts: sortsSchema.optional(),
  search: searchSchema.optional(),
});
export type ListQuery = z.infer<typeof listQuerySchema>;
@@ -0,0 +1,27 @@
import { Expression, ExpressionBuilder, sql, SqlBool } from 'kysely';
import { DB } from '@docmost/db/types/db';
import { SearchSpec } from './schema.zod';
type Eb = ExpressionBuilder<DB, 'baseRows'>;
/*
* `search_text` and `search_tsv` are maintained by the base_rows search
* trigger installed in the bases-hardening migration. Both columns are
* indexed — pg_trgm GIN for ILIKE and standard GIN for tsvector.
*/
export function buildSearch(eb: Eb, spec: SearchSpec): Expression<SqlBool> {
  // Build the row-search predicate over the trigger-maintained columns.
  // A blank (whitespace-only) query matches everything.
  const query = spec.query.trim();
  if (query.length === 0) return sql<SqlBool>`TRUE`;

  if (spec.mode === 'fts') {
    // Accent-insensitive match via f_unaccent (same helper the search
    // trigger uses when populating search_tsv / search_text).
    return sql<SqlBool>`search_tsv @@ plainto_tsquery('english', f_unaccent(${query}))`;
  }

  // trigram ILIKE mode (default). Escape %/_/\ in user input so LIKE
  // wildcards can't be injected.
  const escaped = query.replace(/[%_\\]/g, '\\$&');
  return sql<SqlBool>`search_text ILIKE ${`%${escaped}%`}`;
}
+112
View File
@@ -0,0 +1,112 @@
import { RawBuilder, sql } from 'kysely';
import { BaseProperty } from '@docmost/db/types/entity.types';
import { SortSpec } from './schema.zod';
import { PropertyKind, SYSTEM_COLUMN, propertyKind } from './kinds';
import {
boolCell,
dateCell,
numericCell,
textCell,
} from './extractors';
import { PropertySchema } from './predicate';
/*
* Builds sort expressions with sentinel wrapping so NULLs compare
* deterministically at the end of the sort order. This avoids the
* `__null__` string sentinel bug in the old cursor encoder: because the
* sort expression never returns NULL, the cursor simply stores the
* extracted value and keyset comparisons work natively.
*/
// One resolved sort level, ready to be embedded in the row query.
export type SortBuild = {
  key: string; // alias used in cursor (s0, s1, ...)
  expression: RawBuilder<any>; // COALESCE-wrapped expression with sentinel
  direction: 'asc' | 'desc';
  valueType: 'numeric' | 'date' | 'text' | 'bool'; // how cursor values round-trip
};
// Fixed tiebreaker keys appended after the user's sorts (cursor tail).
export type TailKey = 'position' | 'id';
export const CURSOR_TAIL_KEYS: TailKey[] = ['position', 'id'];
export function buildSorts(
  sorts: SortSpec[],
  schema: PropertySchema,
): SortBuild[] {
  // Translate user sort specs into sort expressions. Specs referencing
  // unknown properties are silently dropped; cursor aliases (s0, s1, ...)
  // follow the spec index, so they stay stable even when a spec is skipped.
  const built: SortBuild[] = [];
  sorts.forEach((spec, index) => {
    const prop = schema.get(spec.propertyId);
    if (!prop) return;

    const alias = `s${index}`;
    const sysCol = SYSTEM_COLUMN[prop.type];
    if (sysCol) {
      // System columns are referenced directly, without sentinel wrapping.
      // NOTE(review): lastUpdatedById looks nullable (the filter layer has
      // an isEmpty branch for it); if so, this expression can evaluate to
      // NULL, unlike the sentinel-wrapped ones — confirm against the schema.
      built.push({
        key: alias,
        expression: sql`${sql.ref(sysCol)}`,
        direction: spec.direction,
        valueType: prop.type === 'lastEditedBy' ? 'text' : 'date',
      });
      return;
    }

    const kind = propertyKind(prop.type);
    if (kind) {
      built.push(
        wrapWithSentinel(spec.propertyId, kind, spec.direction, alias),
      );
    }
  });
  return built;
}
function wrapWithSentinel(
  propertyId: string,
  kind: Exclude<ReturnType<typeof propertyKind>, null>,
  direction: 'asc' | 'desc',
  key: string,
): SortBuild {
  // Wrap the extracted cell value in COALESCE with a direction-aware
  // sentinel so the expression never evaluates to NULL — NULL cells then
  // collate at the end of the sort order and keyset cursors can store the
  // extracted value directly.
  const ascending = direction === 'asc';
  switch (kind) {
    case PropertyKind.NUMERIC: {
      const sentinel = ascending
        ? sql`'Infinity'::numeric`
        : sql`'-Infinity'::numeric`;
      return {
        key,
        expression: sql`COALESCE(${numericCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'numeric',
      };
    }
    case PropertyKind.DATE: {
      const sentinel = ascending
        ? sql`'infinity'::timestamptz`
        : sql`'-infinity'::timestamptz`;
      return {
        key,
        expression: sql`COALESCE(${dateCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'date',
      };
    }
    case PropertyKind.BOOL: {
      // false < true. ASC NULLS LAST => null → true; DESC NULLS LAST => null → false.
      const sentinel = ascending ? sql`TRUE` : sql`FALSE`;
      return {
        key,
        expression: sql`COALESCE(${boolCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'bool',
      };
    }
    default: {
      // TEXT / SELECT / MULTI / PERSON / FILE — sort by raw extracted text.
      // chr(1114111) is U+10FFFF, the highest code point.
      const sentinel = ascending ? sql`chr(1114111)` : sql`''`;
      return {
        key,
        expression: sql`COALESCE(${textCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'text',
      };
    }
  }
}
@@ -0,0 +1,47 @@
import { BaseProperty, BaseRow, BaseView } from '@docmost/db/types/entity.types';
/*
* Domain event payloads emitted by the base services after each mutation
* commits. `base-ws-consumers.ts` picks these up and fans them out onto
* the appropriate socket.io room. `requestId` lets the originating client
* skip replaying its own echo.
*/
// Common envelope shared by every base domain event.
type BaseEventBase = {
  baseId: string;
  workspaceId: string;
  actorId?: string | null; // user who performed the mutation, if known
  requestId?: string | null; // originating client's echo-suppression token
};
// Row lifecycle events.
export type BaseRowCreatedEvent = BaseEventBase & { row: BaseRow };
export type BaseRowUpdatedEvent = BaseEventBase & {
  rowId: string;
  patch: Record<string, unknown>;
  // NOTE(review): presumably keyed by property id — verify against the
  // row service that emits this event.
  updatedCells: Record<string, unknown>;
};
export type BaseRowDeletedEvent = BaseEventBase & { rowId: string };
export type BaseRowRestoredEvent = BaseEventBase & { rowId: string };
export type BaseRowReorderedEvent = BaseEventBase & {
  rowId: string;
  position: string; // fractional-index position string
};
// Property (column) lifecycle events.
export type BasePropertyCreatedEvent = BaseEventBase & {
  property: BaseProperty;
};
export type BasePropertyUpdatedEvent = BaseEventBase & {
  property: BaseProperty;
  schemaVersion: number;
};
export type BasePropertyDeletedEvent = BaseEventBase & { propertyId: string };
export type BasePropertyReorderedEvent = BaseEventBase & {
  propertyId: string;
  position: string; // fractional-index position string
};
// View lifecycle events.
export type BaseViewCreatedEvent = BaseEventBase & { view: BaseView };
export type BaseViewUpdatedEvent = BaseEventBase & { view: BaseView };
export type BaseViewDeletedEvent = BaseEventBase & { viewId: string };
// Signals clients to invalidate row caches after bulk cell migrations.
export type BaseSchemaBumpedEvent = BaseEventBase & { schemaVersion: number };
@@ -0,0 +1,177 @@
import { Logger, OnModuleDestroy } from '@nestjs/common';
import { OnWorkerEvent, Processor, WorkerHost } from '@nestjs/bullmq';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Job } from 'bullmq';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { executeTx } from '@docmost/db/utils';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import {
IBaseCellGcJob,
IBaseTypeConversionJob,
} from '../../../integrations/queue/constants/queue.interface';
import { processBaseTypeConversion } from '../tasks/base-type-conversion.task';
import { processBaseCellGc } from '../tasks/base-cell-gc.task';
import { EventName } from '../../../common/events/event.contants';
import {
BasePropertyUpdatedEvent,
BaseSchemaBumpedEvent,
} from '../events/base-events';
/*
 * BullMQ worker for QueueName.BASE_QUEUE. Runs long base-maintenance jobs
 * (property type conversions, cell garbage collection) and emits domain
 * events afterwards so connected clients can repaint.
 */
@Processor(QueueName.BASE_QUEUE)
export class BaseQueueProcessor
  extends WorkerHost
  implements OnModuleDestroy
{
  private readonly logger = new Logger(BaseQueueProcessor.name);
  constructor(
    @InjectKysely() private readonly db: KyselyDB,
    private readonly baseRowRepo: BaseRowRepo,
    private readonly basePropertyRepo: BasePropertyRepo,
    private readonly baseRepo: BaseRepo,
    private readonly eventEmitter: EventEmitter2,
  ) {
    super();
  }
  /**
   * Dispatch on job name. Unknown job names are logged and ignored so a
   * stray/legacy job never crashes the worker.
   */
  async process(job: Job): Promise<unknown> {
    switch (job.name) {
      case QueueJob.BASE_TYPE_CONVERSION: {
        const data = job.data as IBaseTypeConversionJob;
        // Cell rewrite + pending→live swap + schema_version bump share one
        // transaction so readers never see cells already in the new format
        // under a still-pending type (or vice versa).
        const { summary, schemaVersion } = await executeTx(
          this.db,
          async (trx) => {
            const s = await processBaseTypeConversion(
              this.db,
              this.baseRowRepo,
              data,
              {
                trx,
                // Fire-and-forget progress reporting; the returned promise
                // is intentionally not awaited inside the rewrite loop.
                progress: (processed) => job.updateProgress({ processed }),
              },
            );
            await this.basePropertyRepo.commitPendingTypeChange(
              data.propertyId,
              trx,
            );
            await this.basePropertyRepo.bumpSchemaVersion(data.propertyId, trx);
            const v = await this.baseRepo.bumpSchemaVersion(data.baseId, trx);
            return { summary: s, schemaVersion: v };
          },
        );
        // Emit the property:updated first so clients drop the "Converting…"
        // badge and repaint headers with the new type, then schema:bumped
        // so they invalidate row caches to pick up migrated cells.
        const updated = await this.basePropertyRepo.findById(data.propertyId);
        if (updated) {
          const event: BasePropertyUpdatedEvent = {
            baseId: data.baseId,
            workspaceId: data.workspaceId,
            actorId: data.actorId ?? null,
            requestId: null,
            property: updated,
            schemaVersion: updated.schemaVersion,
          };
          this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
        }
        this.emitSchemaBumped(
          data.baseId,
          data.workspaceId,
          schemaVersion,
          data.actorId,
        );
        return summary;
      }
      case QueueJob.BASE_CELL_GC: {
        const data = job.data as IBaseCellGcJob;
        // GC runs outside a shared transaction; only the subsequent schema
        // bump tells clients to refresh.
        await processBaseCellGc(
          this.db,
          this.baseRowRepo,
          this.basePropertyRepo,
          data,
        );
        const schemaVersion = await this.baseRepo.bumpSchemaVersion(
          data.baseId,
        );
        this.emitSchemaBumped(data.baseId, data.workspaceId, schemaVersion);
        return;
      }
      default:
        this.logger.warn(`Unknown job: ${job.name}`);
    }
  }
  /**
   * Broadcast a schema-version bump. `requestId` is always null because
   * these bumps originate from the worker, not a specific client request.
   */
  private emitSchemaBumped(
    baseId: string,
    workspaceId: string,
    schemaVersion: number,
    actorId?: string,
  ): void {
    const event: BaseSchemaBumpedEvent = {
      baseId,
      workspaceId,
      actorId: actorId ?? null,
      requestId: null,
      schemaVersion,
    };
    this.eventEmitter.emit(EventName.BASE_SCHEMA_BUMPED, event);
  }
  @OnWorkerEvent('active')
  onActive(job: Job) {
    this.logger.debug(`Processing ${job.name} job ${job.id}`);
  }
  /** BullMQ 'failed' handler — logs, then undoes a stuck conversion. */
  @OnWorkerEvent('failed')
  async onError(job: Job) {
    this.logger.error(
      `Error processing ${job.name} job ${job.id}. Reason: ${job.failedReason}`,
    );
    // Clean up a stuck conversion so the column doesn't wedge in
    // "Converting…" forever. Cells remain under the original type because
    // the rewrite transaction rolled back.
    if (job.name === QueueJob.BASE_TYPE_CONVERSION) {
      const data = job.data as IBaseTypeConversionJob;
      try {
        await this.basePropertyRepo.clearPendingTypeChange(data.propertyId);
        const reverted = await this.basePropertyRepo.findById(data.propertyId);
        if (reverted) {
          const event: BasePropertyUpdatedEvent = {
            baseId: data.baseId,
            workspaceId: data.workspaceId,
            actorId: data.actorId ?? null,
            requestId: null,
            property: reverted,
            schemaVersion: reverted.schemaVersion,
          };
          this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
        }
      } catch (cleanupErr) {
        // Log-and-continue: cleanup failure must not mask the job failure.
        this.logger.error(
          `Failed to clear pending type change on property ${data.propertyId}`,
          cleanupErr as Error,
        );
      }
    }
  }
  @OnWorkerEvent('completed')
  onCompleted(job: Job) {
    this.logger.debug(`Completed ${job.name} job ${job.id}`);
  }
  /** Close the underlying BullMQ worker so in-flight jobs finish cleanly. */
  async onModuleDestroy(): Promise<void> {
    if (this.worker) {
      await this.worker.close();
    }
  }
}
@@ -0,0 +1,74 @@
import { Injectable, Logger } from '@nestjs/common';
import { RedisService } from '@nestjs-labs/nestjs-ioredis';
import type { Redis } from 'ioredis';
// Redis key prefix for per-base presence hashes.
const PRESENCE_KEY_PREFIX = 'presence:base:';
// Entries older than this are filtered out on read (soft TTL).
const PRESENCE_ENTRY_TTL_MS = 10_000;
// Hard TTL on the whole hash key; refreshed on every write.
const PRESENCE_KEY_TTL_S = 60;
// One user's presence state inside a base room.
export type PresenceEntry = {
  userId: string;
  cellId?: string | null; // cell the user is focused on, if any
  selection?: unknown; // opaque client-defined selection payload
  ts: number; // ms epoch of last heartbeat; drives the soft TTL
};
/*
* Ephemeral per-base presence. No DB. `presence:base:{baseId}` is a Redis
* HASH keyed by userId with a JSON-serialised entry. Entries older than
* PRESENCE_ENTRY_TTL_MS are filtered on read; the key itself is refreshed
* with a longer Redis EXPIRE on every write so unused rooms drain on
* their own.
*/
@Injectable()
export class BasePresenceService {
  private readonly logger = new Logger(BasePresenceService.name);
  private readonly redis: Redis;

  constructor(private readonly redisService: RedisService) {
    this.redis = this.redisService.getOrThrow();
  }

  /**
   * Upsert one user's presence entry and refresh the room key's TTL in a
   * single pipelined round trip.
   */
  async setPresence(baseId: string, entry: PresenceEntry): Promise<void> {
    const key = PRESENCE_KEY_PREFIX + baseId;
    const pipeline = this.redis.multi();
    pipeline.hset(key, entry.userId, JSON.stringify(entry));
    pipeline.expire(key, PRESENCE_KEY_TTL_S);
    await pipeline.exec();
  }

  /** Remove a single user's entry from the room hash. */
  async leave(baseId: string, userId: string): Promise<void> {
    await this.redis.hdel(PRESENCE_KEY_PREFIX + baseId, userId);
  }

  /**
   * Read the room's current presence, dropping entries older than
   * PRESENCE_ENTRY_TTL_MS and any that fail to parse.
   */
  async snapshot(baseId: string): Promise<PresenceEntry[]> {
    const key = PRESENCE_KEY_PREFIX + baseId;
    const fields = await this.redis.hgetall(key);
    const cutoff = Date.now() - PRESENCE_ENTRY_TTL_MS;
    const live: PresenceEntry[] = [];
    const expired: string[] = [];

    for (const [userId, json] of Object.entries(fields)) {
      let entry: PresenceEntry | null;
      try {
        entry = JSON.parse(json) as PresenceEntry;
      } catch {
        entry = null; // unparseable entries are garbage-collected below
      }
      if (entry && entry.ts >= cutoff) {
        live.push(entry);
      } else {
        expired.push(userId);
      }
    }

    // Opportunistic GC so the hash doesn't accumulate during long-lived
    // rooms where the key TTL keeps getting refreshed by active users.
    if (expired.length > 0) {
      this.redis.hdel(key, ...expired).catch(() => {});
    }
    return live;
  }
}
@@ -0,0 +1,165 @@
import { Injectable, Logger } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { EventName } from '../../../common/events/event.contants';
import { BaseWsService } from './base-ws.service';
import {
BasePropertyCreatedEvent,
BasePropertyDeletedEvent,
BasePropertyReorderedEvent,
BasePropertyUpdatedEvent,
BaseRowCreatedEvent,
BaseRowDeletedEvent,
BaseRowReorderedEvent,
BaseRowUpdatedEvent,
BaseSchemaBumpedEvent,
BaseViewCreatedEvent,
BaseViewDeletedEvent,
BaseViewUpdatedEvent,
} from '../events/base-events';
/*
* In-process listeners that forward base domain events onto the
* `base-{baseId}` socket.io room. Originating clients suppress their own
* echoes via `requestId`.
*/
@Injectable()
export class BaseWsConsumers {
  private readonly logger = new Logger(BaseWsConsumers.name);

  constructor(private readonly ws: BaseWsService) {}

  /**
   * Forward a domain event's payload onto the `base-{baseId}` room, tagged
   * with the actor/request echo fields so the originating client can skip
   * replaying its own mutation.
   */
  private forward(
    e: { baseId: string; actorId?: string | null; requestId?: string | null },
    operation: `base:${string}`,
    payload: Record<string, unknown>,
  ): void {
    this.ws.emitToBase(e.baseId, {
      operation,
      baseId: e.baseId,
      ...payload,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  @OnEvent(EventName.BASE_ROW_CREATED)
  onRowCreated(e: BaseRowCreatedEvent) {
    this.forward(e, 'base:row:created', { row: e.row });
  }

  @OnEvent(EventName.BASE_ROW_UPDATED)
  onRowUpdated(e: BaseRowUpdatedEvent) {
    this.forward(e, 'base:row:updated', {
      rowId: e.rowId,
      patch: e.patch,
      updatedCells: e.updatedCells,
    });
  }

  @OnEvent(EventName.BASE_ROW_DELETED)
  onRowDeleted(e: BaseRowDeletedEvent) {
    this.forward(e, 'base:row:deleted', { rowId: e.rowId });
  }

  @OnEvent(EventName.BASE_ROW_REORDERED)
  onRowReordered(e: BaseRowReorderedEvent) {
    this.forward(e, 'base:row:reordered', {
      rowId: e.rowId,
      position: e.position,
    });
  }

  @OnEvent(EventName.BASE_PROPERTY_CREATED)
  onPropertyCreated(e: BasePropertyCreatedEvent) {
    this.forward(e, 'base:property:created', { property: e.property });
  }

  @OnEvent(EventName.BASE_PROPERTY_UPDATED)
  onPropertyUpdated(e: BasePropertyUpdatedEvent) {
    this.forward(e, 'base:property:updated', {
      property: e.property,
      schemaVersion: e.schemaVersion,
    });
  }

  @OnEvent(EventName.BASE_PROPERTY_DELETED)
  onPropertyDeleted(e: BasePropertyDeletedEvent) {
    this.forward(e, 'base:property:deleted', { propertyId: e.propertyId });
  }

  @OnEvent(EventName.BASE_PROPERTY_REORDERED)
  onPropertyReordered(e: BasePropertyReorderedEvent) {
    this.forward(e, 'base:property:reordered', {
      propertyId: e.propertyId,
      position: e.position,
    });
  }

  @OnEvent(EventName.BASE_VIEW_CREATED)
  onViewCreated(e: BaseViewCreatedEvent) {
    this.forward(e, 'base:view:created', { view: e.view });
  }

  @OnEvent(EventName.BASE_VIEW_UPDATED)
  onViewUpdated(e: BaseViewUpdatedEvent) {
    this.forward(e, 'base:view:updated', { view: e.view });
  }

  @OnEvent(EventName.BASE_VIEW_DELETED)
  onViewDeleted(e: BaseViewDeletedEvent) {
    this.forward(e, 'base:view:deleted', { viewId: e.viewId });
  }

  @OnEvent(EventName.BASE_SCHEMA_BUMPED)
  onSchemaBumped(e: BaseSchemaBumpedEvent) {
    // Carries no actor/request echo fields — emitted directly rather than
    // through forward().
    this.ws.emitToBase(e.baseId, {
      operation: 'base:schema:bumped',
      baseId: e.baseId,
      schemaVersion: e.schemaVersion,
    });
  }
}
@@ -0,0 +1,233 @@
import { Injectable, Logger } from '@nestjs/common';
import { z } from 'zod';
import type { Server, Socket } from 'socket.io';
import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { findHighestUserSpaceRole } from '@docmost/db/repos/space/utils';
import { getBaseRoomName } from '../../../ws/ws.utils';
import { BasePresenceService, PresenceEntry } from './base-presence.service';
/*
* Inbound shapes from untrusted socket clients. Zod-validated at the
* boundary so malformed payloads (non-uuid baseId, missing fields,
* oversized selection blobs) never reach the permission check or Redis.
*/
// Join the base's socket room (after a permission check).
const baseSubscribeSchema = z.object({
  operation: z.literal('base:subscribe'),
  baseId: z.uuid(),
});
// Leave the base's socket room and drop presence.
const baseUnsubscribeSchema = z.object({
  operation: z.literal('base:unsubscribe'),
  baseId: z.uuid(),
});
// Presence heartbeat: which cell the user is on, plus an opaque selection.
const basePresenceSchema = z.object({
  operation: z.literal('base:presence'),
  baseId: z.uuid(),
  cellId: z.string().max(200).optional().nullable(),
  selection: z.unknown().optional(),
});
// Explicit presence removal without leaving the room.
const basePresenceLeaveSchema = z.object({
  operation: z.literal('base:presence:leave'),
  baseId: z.uuid(),
});
// Discriminated by the `operation` literal in each member.
const inboundSchema = z.union([
  baseSubscribeSchema,
  baseUnsubscribeSchema,
  basePresenceSchema,
  basePresenceLeaveSchema,
]);
type BaseInbound = z.infer<typeof inboundSchema>;
// Everything emitted to clients carries a `base:`-prefixed operation tag.
type BaseOutbound = { operation: `base:${string}` } & Record<string, unknown>;
/*
 * Socket-facing service for base rooms: validates inbound client messages,
 * enforces space-level read permission on subscribe, and relays presence
 * through BasePresenceService. The socket.io Server is injected late via
 * setServer() by the gateway.
 */
@Injectable()
export class BaseWsService {
  private readonly logger = new Logger(BaseWsService.name);
  // Null until the gateway calls setServer(); emitToBase no-ops before that.
  private server: Server | null = null;
  constructor(
    private readonly baseRepo: BaseRepo,
    private readonly spaceMemberRepo: SpaceMemberRepo,
    private readonly presence: BasePresenceService,
  ) {}
  /** Late-bind the socket.io server (called once by the gateway). */
  setServer(server: Server): void {
    this.server = server;
  }
  /** Cheap router check: does this raw message belong to the base domain? */
  isBaseEvent(data: any): boolean {
    return (
      typeof data?.operation === 'string' && data.operation.startsWith('base:')
    );
  }
  /**
   * Entry point for untrusted client messages. Anything failing schema
   * validation is logged at debug level and silently dropped.
   */
  async handleInbound(client: Socket, raw: unknown): Promise<void> {
    const parsed = inboundSchema.safeParse(raw);
    if (!parsed.success) {
      this.logger.debug(
        `Rejecting inbound base event: ${parsed.error.issues[0]?.message}`,
      );
      return;
    }
    const data = parsed.data;
    switch (data.operation) {
      case 'base:subscribe':
        await this.subscribe(client, data.baseId);
        return;
      case 'base:unsubscribe':
        await this.unsubscribe(client, data.baseId);
        return;
      case 'base:presence':
        await this.handlePresence(client, data);
        return;
      case 'base:presence:leave':
        await this.handlePresenceLeave(client, data.baseId);
        return;
    }
  }
  /** Broadcast a payload to every socket in the base's room. */
  emitToBase(baseId: string, payload: BaseOutbound): void {
    if (!this.server) return;
    this.server.to(getBaseRoomName(baseId)).emit('message', payload);
  }
  /*
   * Called from WsGateway on client disconnect. Walks the per-socket
   * set of subscribed bases and cleans up presence without waiting for
   * entry TTLs to expire — keeps the snapshot fresh for others in the
   * room.
   */
  async handleDisconnect(client: Socket): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    const subs = this.subscriptionsFor(client);
    if (!userId || subs.size === 0) return;
    for (const baseId of subs) {
      await this.presence.leave(baseId, userId);
      this.emitToBase(baseId, {
        operation: 'base:presence:leave',
        baseId,
        userId,
      });
    }
    subs.clear();
  }
  // --- private -------------------------------------------------------
  /**
   * Join the base room after checking the base exists and the user can
   * read its space. Each failure mode is reported back to the client with
   * a distinct `reason` so the UI can react appropriately.
   */
  private async subscribe(client: Socket, baseId: string): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) {
      client.emit('message', {
        operation: 'base:subscribe:error',
        baseId,
        reason: 'unauthenticated',
      });
      return;
    }
    const base = await this.baseRepo.findById(baseId);
    if (!base) {
      client.emit('message', {
        operation: 'base:subscribe:error',
        baseId,
        reason: 'not_found',
      });
      return;
    }
    const canRead = await this.canReadBaseSpace(userId, base.spaceId);
    if (!canRead) {
      client.emit('message', {
        operation: 'base:subscribe:error',
        baseId,
        reason: 'forbidden',
      });
      return;
    }
    client.join(getBaseRoomName(baseId));
    this.subscriptionsFor(client).add(baseId);
    // Send the current presence snapshot to just this client so their UI
    // can paint who's already editing what.
    const snapshot = await this.presence.snapshot(baseId);
    client.emit('message', {
      operation: 'base:presence:snapshot',
      baseId,
      entries: snapshot,
    });
  }
  /** Leave the room, drop presence, and tell the room the user is gone. */
  private async unsubscribe(client: Socket, baseId: string): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) return;
    client.leave(getBaseRoomName(baseId));
    this.subscriptionsFor(client).delete(baseId);
    await this.presence.leave(baseId, userId);
    this.emitToBase(baseId, {
      operation: 'base:presence:leave',
      baseId,
      userId,
    });
  }
  /**
   * Store a presence heartbeat and fan it out to the room. Ignored unless
   * the socket has actually joined the base room (prevents presence spam
   * into rooms the client never subscribed to).
   */
  private async handlePresence(
    client: Socket,
    data: Extract<BaseInbound, { operation: 'base:presence' }>,
  ): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) return;
    if (!client.rooms.has(getBaseRoomName(data.baseId))) return;
    const entry: PresenceEntry = {
      userId,
      cellId: data.cellId ?? null,
      selection: data.selection ?? null,
      ts: Date.now(),
    };
    await this.presence.setPresence(data.baseId, entry);
    this.emitToBase(data.baseId, {
      operation: 'base:presence',
      baseId: data.baseId,
      ...entry,
    });
  }
  /** Drop presence (without leaving the room) and notify the room. */
  private async handlePresenceLeave(
    client: Socket,
    baseId: string,
  ): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) return;
    await this.presence.leave(baseId, userId);
    this.emitToBase(baseId, {
      operation: 'base:presence:leave',
      baseId,
      userId,
    });
  }
  /** Read access = the user has any role in the base's space. */
  private async canReadBaseSpace(
    userId: string,
    spaceId: string,
  ): Promise<boolean> {
    const roles = await this.spaceMemberRepo.getUserSpaceRoles(userId, spaceId);
    return !!findHighestUserSpaceRole(roles);
  }
  /** Lazily-created per-socket set of subscribed base ids. */
  private subscriptionsFor(client: Socket): Set<string> {
    const existing = client.data.baseSubscriptions as Set<string> | undefined;
    if (existing) return existing;
    const fresh = new Set<string>();
    client.data.baseSubscriptions = fresh;
    return fresh;
  }
}
@@ -1,13 +1,21 @@
import {
BadRequestException,
ConflictException,
Injectable,
Logger,
NotFoundException,
ServiceUnavailableException,
} from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { InjectQueue } from '@nestjs/bullmq';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Queue } from 'bullmq';
import { sql, SqlBool } from 'kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { executeTx } from '@docmost/db/utils';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { CreatePropertyDto } from '../dto/create-property.dto';
import {
UpdatePropertyDto,
@@ -15,49 +23,135 @@ import {
ReorderPropertyDto,
} from '../dto/update-property.dto';
import {
BasePropertyType,
BasePropertyTypeValue,
parseTypeOptions,
attemptCellConversion,
validateTypeOptions,
isSystemPropertyType,
} from '../base.schemas';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import {
IBaseCellGcJob,
IBaseTypeConversionJob,
} from '../../../integrations/queue/constants/queue.interface';
import { EventName } from '../../../common/events/event.contants';
import {
BasePropertyCreatedEvent,
BasePropertyDeletedEvent,
BasePropertyReorderedEvent,
BasePropertyUpdatedEvent,
BaseSchemaBumpedEvent,
} from '../events/base-events';
import { processBaseTypeConversion } from '../tasks/base-type-conversion.task';
/*
* Types whose cell values are IDs referencing external records. Converting
* them to any other type (especially text) requires an ID → display
* resolution pass — otherwise `select → text` persists the choice UUID
* instead of its display name (the bug that motivated this job).
*/
const ID_REFERENCING_TYPES: ReadonlySet<BasePropertyTypeValue> = new Set([
BasePropertyType.SELECT,
BasePropertyType.STATUS,
BasePropertyType.MULTI_SELECT,
BasePropertyType.PERSON,
BasePropertyType.FILE,
]);
/*
* Row-count cutoff below which the cell rewrite runs synchronously inside
* the HTTP request. Chosen so even worst-case (file → text with attachment
* name joins) completes comfortably under the default 30s request timeout.
* Larger bases fall back to the BullMQ worker path which flips the type
* only after the rewrite completes, showing a "Converting…" header state
* in the meantime.
*/
const INLINE_CONVERSION_ROW_LIMIT = 2000;
@Injectable()
export class BasePropertyService {
private readonly logger = new Logger(BasePropertyService.name);
constructor(
@InjectKysely() private readonly db: KyselyDB,
private readonly basePropertyRepo: BasePropertyRepo,
private readonly baseRowRepo: BaseRowRepo,
private readonly baseRepo: BaseRepo,
@InjectQueue(QueueName.BASE_QUEUE) private readonly baseQueue: Queue,
private readonly eventEmitter: EventEmitter2,
) {}
async create(workspaceId: string, dto: CreatePropertyDto) {
async create(workspaceId: string, dto: CreatePropertyDto, actorId?: string) {
const type = dto.type as BasePropertyTypeValue;
let validatedTypeOptions = null;
if (dto.typeOptions) {
validatedTypeOptions = parseTypeOptions(type, dto.typeOptions);
} else {
validatedTypeOptions = parseTypeOptions(type, {});
}
const validatedTypeOptions = dto.typeOptions
? parseTypeOptionsOrThrow(type, dto.typeOptions)
: parseTypeOptionsOrThrow(type, {});
const lastPosition = await this.basePropertyRepo.getLastPosition(
dto.baseId,
);
const position = generateJitteredKeyBetween(lastPosition, null);
return this.basePropertyRepo.insertProperty({
baseId: dto.baseId,
name: dto.name,
type: dto.type,
position,
typeOptions: validatedTypeOptions as any,
workspaceId,
const created = await executeTx(this.db, async (trx) => {
const row = await this.basePropertyRepo.insertProperty(
{
baseId: dto.baseId,
name: dto.name,
type: dto.type,
position,
typeOptions: validatedTypeOptions as any,
workspaceId,
},
trx,
);
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
return row;
});
const event: BasePropertyCreatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: null,
property: created,
};
this.eventEmitter.emit(EventName.BASE_PROPERTY_CREATED, event);
return created;
}
async update(dto: UpdatePropertyDto) {
/*
* Metadata update. Three paths:
*
* - Coercion-safe (number↔text, text↔url, etc.): flip the type/type
* options in one transaction, bump schema_version, return. The engine
* reads cells through schema-on-read extractors so no cell rewrite is
* needed.
* - ID-referencing or system-involving conversion with a small number
* of rows: run the cell rewrite, flip the type, and bump
* schema_version all in one transaction — the HTTP request waits but
* nobody ever sees raw IDs under the new type.
* - Same kind of conversion on a large base: stage the target type on
* `pendingType` / `pendingTypeOptions`, keep the live `type` as-is,
* enqueue the worker. Clients render under the old type (so cells
* resolve to display names, not UUIDs) and show a "Converting…"
* badge until the worker transaction commits and swaps the pending
* pair onto `type`.
*/
async update(
dto: UpdatePropertyDto,
workspaceId: string,
actorId?: string,
) {
const t0 = Date.now();
const tick = (label: string) =>
this.logger.log(
`property-update ${dto.propertyId} ${label}=${Date.now() - t0}ms`,
);
const property = await this.basePropertyRepo.findById(dto.propertyId);
tick('after-findById');
if (!property) {
throw new NotFoundException('Property not found');
}
@@ -66,56 +160,218 @@ export class BasePropertyService {
throw new BadRequestException('Property does not belong to this base');
}
// Block concurrent type changes — the worker still owns the previous
// conversion, and letting a second one through would race on `type`.
if (property.pendingType) {
throw new ConflictException(
'A type conversion is already in progress for this property',
);
}
const isTypeChange = dto.type && dto.type !== property.type;
const oldType = property.type as BasePropertyTypeValue;
const oldTypeOptions = property.typeOptions;
const newType = (dto.type ?? property.type) as BasePropertyTypeValue;
let validatedTypeOptions = property.typeOptions;
if (dto.typeOptions !== undefined) {
validatedTypeOptions = parseTypeOptions(newType, dto.typeOptions) as any;
validatedTypeOptions = parseTypeOptionsOrThrow(
newType,
dto.typeOptions,
) as any;
} else if (isTypeChange) {
const result = validateTypeOptions(newType, {});
validatedTypeOptions = result.success ? (result.data as any) : null;
}
let conversionSummary: {
converted: number;
cleared: number;
total: number;
} | null = null;
const involvesSystem =
isSystemPropertyType(oldType) || isSystemPropertyType(newType);
const needsIdResolution = ID_REFERENCING_TYPES.has(oldType);
const needsCellRewrite =
isTypeChange && (involvesSystem || needsIdResolution);
if (isTypeChange) {
const involvesSystem =
isSystemPropertyType(property.type) || isSystemPropertyType(newType);
if (involvesSystem) {
conversionSummary = await this.clearCellValues(
dto.baseId,
// --- Path 1: no cell rewrite needed ---------------------------------
if (!needsCellRewrite) {
await executeTx(this.db, async (trx) => {
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{
...(dto.name !== undefined && { name: dto.name }),
...(dto.type !== undefined && { type: dto.type }),
typeOptions: validatedTypeOptions,
},
trx,
);
} else {
conversionSummary = await this.convertCellValues(
dto.baseId,
dto.propertyId,
property.type as BasePropertyTypeValue,
newType,
);
}
if (isTypeChange) {
await this.basePropertyRepo.bumpSchemaVersion(dto.propertyId, trx);
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
}
});
return this.loadAndEmit(dto, workspaceId, actorId, null);
}
await this.basePropertyRepo.updateProperty(dto.propertyId, {
...(dto.name !== undefined && { name: dto.name }),
...(dto.type !== undefined && { type: dto.type }),
typeOptions: validatedTypeOptions,
});
// --- Path 2 or 3: cell rewrite needed -------------------------------
const conversionPayload: IBaseTypeConversionJob = {
baseId: dto.baseId,
propertyId: dto.propertyId,
workspaceId,
fromType: oldType,
toType: newType,
fromTypeOptions: oldTypeOptions,
toTypeOptions: validatedTypeOptions,
clearMode: involvesSystem,
actorId,
};
const updatedProperty = await this.basePropertyRepo.findById(
// Count only the rows whose cell jsonb has this property's key — the
// set the worker will actually rewrite. A 100k-row base with the
// property set on 12 rows is trivial to convert inline; the previous
// count-all-live-rows check was routing those to the worker.
const rowsToConvert = await this.countRowsToConvert(
dto.baseId,
workspaceId,
dto.propertyId,
);
tick(`after-countRowsToConvert(${rowsToConvert})`);
return { property: updatedProperty, conversionSummary };
if (rowsToConvert <= INLINE_CONVERSION_ROW_LIMIT) {
tick('taking-inline-path');
// Path 2: inline rewrite. Apply the name-only fields (if any), run
// the rewrite, then flip the type — all in one transaction so
// readers only ever see a consistent snapshot.
const schemaVersion = await executeTx(this.db, async (trx) => {
if (dto.name !== undefined) {
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{ name: dto.name },
trx,
);
}
await processBaseTypeConversion(
this.db,
this.baseRowRepo,
conversionPayload,
{ trx },
);
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{
type: newType,
typeOptions: validatedTypeOptions,
},
trx,
);
await this.basePropertyRepo.bumpSchemaVersion(dto.propertyId, trx);
return this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
});
tick('inline-tx-done');
const bumpEvent: BaseSchemaBumpedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: null,
schemaVersion,
};
this.eventEmitter.emit(EventName.BASE_SCHEMA_BUMPED, bumpEvent);
return this.loadAndEmit(dto, workspaceId, actorId, null);
}
// Path 3: stage the new type on pending_*, keep live `type` alone,
// and hand off to the worker. A best-effort revert clears the staging
// fields if the enqueue itself fails.
tick('taking-worker-path');
await executeTx(this.db, async (trx) => {
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{
...(dto.name !== undefined && { name: dto.name }),
pendingType: newType,
pendingTypeOptions: validatedTypeOptions,
},
trx,
);
});
tick('after-set-pending');
let jobId: string | null = null;
try {
const job = await this.baseQueue.add(
QueueJob.BASE_TYPE_CONVERSION,
conversionPayload,
{ attempts: 1 },
);
jobId = String(job.id);
tick(`after-queue.add(${jobId})`);
} catch (err) {
this.logger.error(
`Enqueue of type-conversion failed for property ${dto.propertyId}; clearing pending state`,
err as Error,
);
try {
await this.basePropertyRepo.clearPendingTypeChange(dto.propertyId);
} catch (revertErr) {
this.logger.error(
`Failed to clear pending state on ${dto.propertyId}. Manual intervention required.`,
revertErr as Error,
);
}
throw new ServiceUnavailableException(
'Type conversion queue unavailable. Property update rolled back.',
);
}
const out = await this.loadAndEmit(dto, workspaceId, actorId, jobId);
tick('return');
return out;
}
async delete(dto: DeletePropertyDto) {
/*
* Reloads the property and emits `base.property.updated`. The emission
* has to happen after the outer transaction commits so socket consumers
* never race ahead of visibility.
*/
/*
 * Fetches the property's current row and broadcasts a
 * `base.property.updated` event when it exists. Callers invoke this only
 * after any surrounding transaction has committed, so socket consumers
 * cannot observe the event before the data is visible.
 */
private async loadAndEmit(
  dto: UpdatePropertyDto,
  workspaceId: string,
  actorId: string | undefined,
  jobId: string | null,
) {
  const property = await this.basePropertyRepo.findById(dto.propertyId);
  if (!property) {
    return { property, jobId };
  }
  const payload: BasePropertyUpdatedEvent = {
    baseId: dto.baseId,
    workspaceId,
    actorId: actorId ?? null,
    requestId: dto.requestId ?? null,
    property,
    schemaVersion: property.schemaVersion,
  };
  this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, payload);
  return { property, jobId };
}
/*
 * Counts live rows in the base whose `cells` jsonb actually contains the
 * property's key — the exact set a type conversion must rewrite. The jsonb
 * `?` existence operator keeps rows that never set the property out of the
 * count cheaply.
 */
private async countRowsToConvert(
  baseId: string,
  workspaceId: string,
  propertyId: string,
): Promise<number> {
  const result = await this.db
    .selectFrom('baseRows')
    .select(sql<string>`count(*)`.as('n'))
    .where('workspaceId', '=', workspaceId)
    .where('baseId', '=', baseId)
    .where(sql<SqlBool>`cells ? ${propertyId}`)
    .where('deletedAt', 'is', null)
    .executeTakeFirst();
  return result ? Number(result.n) : 0;
}
async delete(
dto: DeletePropertyDto,
workspaceId: string,
actorId?: string,
) {
const property = await this.basePropertyRepo.findById(dto.propertyId);
if (!property) {
throw new NotFoundException('Property not found');
@@ -129,13 +385,56 @@ export class BasePropertyService {
throw new BadRequestException('Cannot delete the primary property');
}
// Soft-delete so queries filter the property out immediately, then
// enqueue cell-gc to scrub cell keys and hard-delete. If the enqueue
// fails, revert the soft-delete so the property isn't orphaned.
await executeTx(this.db, async (trx) => {
await this.basePropertyRepo.deleteProperty(dto.propertyId, trx);
await this.baseRowRepo.removeCellKey(dto.baseId, dto.propertyId, trx);
await this.basePropertyRepo.softDelete(dto.propertyId, trx);
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
});
const payload: IBaseCellGcJob = {
baseId: dto.baseId,
propertyId: dto.propertyId,
workspaceId,
};
try {
await this.baseQueue.add(QueueJob.BASE_CELL_GC, payload, { attempts: 2 });
} catch (err) {
this.logger.error(
`Enqueue of cell-gc failed for property ${dto.propertyId}; reverting soft-delete`,
err as Error,
);
try {
await this.basePropertyRepo.updateProperty(dto.propertyId, {
deletedAt: null,
});
} catch (revertErr) {
this.logger.error(
`Revert failed for property ${dto.propertyId}. Manual intervention required.`,
revertErr as Error,
);
}
throw new ServiceUnavailableException(
'Cell-GC queue unavailable. Property delete rolled back.',
);
}
const event: BasePropertyDeletedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: dto.requestId ?? null,
propertyId: dto.propertyId,
};
this.eventEmitter.emit(EventName.BASE_PROPERTY_DELETED, event);
}
async reorder(dto: ReorderPropertyDto) {
async reorder(
dto: ReorderPropertyDto,
workspaceId: string,
actorId?: string,
) {
const property = await this.basePropertyRepo.findById(dto.propertyId);
if (!property) {
throw new NotFoundException('Property not found');
@@ -148,69 +447,29 @@ export class BasePropertyService {
await this.basePropertyRepo.updateProperty(dto.propertyId, {
position: dto.position,
});
}
private async clearCellValues(
baseId: string,
propertyId: string,
): Promise<{ converted: number; cleared: number; total: number }> {
const rows = await this.baseRowRepo.findAllByBaseId(baseId);
const updates: Array<{ id: string; cells: Record<string, unknown> }> = [];
for (const row of rows) {
const cells = row.cells as Record<string, unknown>;
if (propertyId in cells) {
updates.push({ id: row.id, cells: { [propertyId]: null } });
}
}
if (updates.length > 0) {
await executeTx(this.db, async (trx) => {
await this.baseRowRepo.batchUpdateCells(updates, trx);
});
}
return { converted: 0, cleared: updates.length, total: updates.length };
}
private async convertCellValues(
baseId: string,
propertyId: string,
fromType: BasePropertyTypeValue,
toType: BasePropertyTypeValue,
): Promise<{ converted: number; cleared: number; total: number }> {
const rows = await this.baseRowRepo.findAllByBaseId(baseId);
let converted = 0;
let cleared = 0;
let total = 0;
const updates: Array<{ id: string; cells: Record<string, unknown> }> = [];
for (const row of rows) {
const cells = row.cells as Record<string, unknown>;
if (!(propertyId in cells)) {
continue;
}
total++;
const currentValue = cells[propertyId];
const result = attemptCellConversion(fromType, toType, currentValue);
if (result.converted) {
converted++;
updates.push({ id: row.id, cells: { [propertyId]: result.value } });
} else {
cleared++;
updates.push({ id: row.id, cells: { [propertyId]: null } });
}
}
if (updates.length > 0) {
await executeTx(this.db, async (trx) => {
await this.baseRowRepo.batchUpdateCells(updates, trx);
});
}
return { converted, cleared, total };
const event: BasePropertyReorderedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: dto.requestId ?? null,
propertyId: dto.propertyId,
position: dto.position,
};
this.eventEmitter.emit(EventName.BASE_PROPERTY_REORDERED, event);
}
}
/*
 * Wraps `parseTypeOptions` so schema failures surface as HTTP 400s with
 * the underlying validation issues attached, instead of bubbling up as
 * unhandled 500s.
 *
 * @param type        the property type whose options schema applies
 * @param typeOptions raw, untrusted options payload from the request
 * @returns the parsed/normalised options object
 * @throws BadRequestException carrying an `issues` array (Zod-style) when
 *         the payload does not match the type's schema
 */
function parseTypeOptionsOrThrow(
  type: BasePropertyTypeValue,
  typeOptions: unknown,
): unknown {
  try {
    return parseTypeOptions(type, typeOptions);
  } catch (err: unknown) {
    // Narrow the unknown error instead of reaching through `as any`:
    // Zod errors expose an `issues` array; anything else yields [].
    const issues =
      err !== null &&
      typeof err === 'object' &&
      Array.isArray((err as { issues?: unknown }).issues)
        ? (err as { issues: unknown[] }).issues
        : [];
    throw new BadRequestException({
      message: 'Invalid typeOptions',
      issues,
    });
  }
}
@@ -4,6 +4,7 @@ import {
NotFoundException,
} from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
@@ -11,6 +12,7 @@ import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
import { CreateRowDto } from '../dto/create-row.dto';
import {
UpdateRowDto,
DeleteRowDto,
ListRowsDto,
ReorderRowDto,
} from '../dto/update-row.dto';
@@ -22,6 +24,21 @@ import {
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import { BaseProperty } from '@docmost/db/types/entity.types';
import {
FilterNode,
PropertySchema,
SearchSpec,
filterGroupSchema,
searchSchema,
validateFilterTree,
} from '../engine';
import { EventName } from '../../../common/events/event.contants';
import {
BaseRowCreatedEvent,
BaseRowDeletedEvent,
BaseRowReorderedEvent,
BaseRowUpdatedEvent,
} from '../events/base-events';
@Injectable()
export class BaseRowService {
@@ -30,19 +47,24 @@ export class BaseRowService {
private readonly baseRowRepo: BaseRowRepo,
private readonly basePropertyRepo: BasePropertyRepo,
private readonly baseViewRepo: BaseViewRepo,
private readonly eventEmitter: EventEmitter2,
) {}
async create(userId: string, workspaceId: string, dto: CreateRowDto) {
let position: string;
if (dto.afterRowId) {
const afterRow = await this.baseRowRepo.findById(dto.afterRowId);
const afterRow = await this.baseRowRepo.findById(dto.afterRowId, {
workspaceId,
});
if (!afterRow || afterRow.baseId !== dto.baseId) {
throw new BadRequestException('Invalid afterRowId');
}
position = generateJitteredKeyBetween(afterRow.position, null);
} else {
const lastPosition = await this.baseRowRepo.getLastPosition(dto.baseId);
const lastPosition = await this.baseRowRepo.getLastPosition(dto.baseId, {
workspaceId,
});
position = generateJitteredKeyBetween(lastPosition, null);
}
@@ -52,68 +74,117 @@ export class BaseRowService {
validatedCells = this.validateCells(dto.cells, properties);
}
return this.baseRowRepo.insertRow({
const created = await this.baseRowRepo.insertRow({
baseId: dto.baseId,
cells: validatedCells as any,
position,
creatorId: userId,
workspaceId,
});
const event: BaseRowCreatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId,
requestId: dto.requestId ?? null,
row: created,
};
this.eventEmitter.emit(EventName.BASE_ROW_CREATED, event);
return created;
}
async getRowInfo(rowId: string, baseId: string) {
const row = await this.baseRowRepo.findById(rowId);
async getRowInfo(rowId: string, baseId: string, workspaceId: string) {
const row = await this.baseRowRepo.findById(rowId, { workspaceId });
if (!row || row.baseId !== baseId) {
throw new NotFoundException('Row not found');
}
return row;
}
async update(dto: UpdateRowDto, userId?: string) {
const row = await this.baseRowRepo.findById(dto.rowId);
async update(dto: UpdateRowDto, workspaceId: string, userId?: string) {
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
const validatedCells = this.validateCells(dto.cells, properties);
const updated = await this.baseRowRepo.updateCells(
dto.rowId,
validatedCells,
{
baseId: dto.baseId,
workspaceId,
actorId: userId,
},
);
if (!updated) {
throw new NotFoundException('Row not found');
}
const event: BaseRowUpdatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: dto.requestId ?? null,
rowId: dto.rowId,
patch: dto.cells,
updatedCells: validatedCells,
};
this.eventEmitter.emit(EventName.BASE_ROW_UPDATED, event);
return updated;
}
async delete(dto: DeleteRowDto, workspaceId: string, userId?: string) {
const row = await this.baseRowRepo.findById(dto.rowId, { workspaceId });
if (!row || row.baseId !== dto.baseId) {
throw new NotFoundException('Row not found');
}
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
const validatedCells = this.validateCells(dto.cells, properties);
await this.baseRowRepo.softDelete(dto.rowId, {
baseId: dto.baseId,
workspaceId,
});
await this.baseRowRepo.updateCells(dto.rowId, validatedCells, userId);
return this.baseRowRepo.findById(dto.rowId);
const event: BaseRowDeletedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: dto.requestId ?? null,
rowId: dto.rowId,
};
this.eventEmitter.emit(EventName.BASE_ROW_DELETED, event);
}
async delete(rowId: string, baseId: string) {
const row = await this.baseRowRepo.findById(rowId);
if (!row || row.baseId !== baseId) {
throw new NotFoundException('Row not found');
}
await this.baseRowRepo.softDelete(rowId);
}
async list(dto: ListRowsDto, pagination: PaginationOptions) {
const hasFilters = dto.filters && dto.filters.length > 0;
const hasSorts = dto.sorts && dto.sorts.length > 0;
if (!hasFilters && !hasSorts) {
return this.baseRowRepo.findByBaseId(dto.baseId, pagination);
}
async list(
dto: ListRowsDto,
pagination: PaginationOptions,
workspaceId: string,
) {
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
const propertyTypeMap = new Map(properties.map((p) => [p.id, p.type]));
return this.baseRowRepo.findByBaseIdFiltered(
dto.baseId,
dto.filters ?? [],
dto.sorts ?? [],
propertyTypeMap,
pagination,
const schema: PropertySchema = new Map(
properties.map((p) => [p.id, p]),
);
const filter = this.normaliseFilter(dto);
const search = this.normaliseSearch(dto.search);
const sorts = dto.sorts?.map((s) => ({
propertyId: s.propertyId,
direction: s.direction,
}));
return this.baseRowRepo.list({
baseId: dto.baseId,
workspaceId,
filter,
sorts,
search,
schema,
pagination,
});
}
async reorder(dto: ReorderRowDto) {
const row = await this.baseRowRepo.findById(dto.rowId);
async reorder(dto: ReorderRowDto, workspaceId: string, userId?: string) {
const row = await this.baseRowRepo.findById(dto.rowId, { workspaceId });
if (!row || row.baseId !== dto.baseId) {
throw new NotFoundException('Row not found');
}
@@ -124,7 +195,52 @@ export class BaseRowService {
throw new BadRequestException('Invalid position value');
}
await this.baseRowRepo.updatePosition(dto.rowId, dto.position);
await this.baseRowRepo.updatePosition(dto.rowId, dto.position, {
baseId: dto.baseId,
workspaceId,
});
const event: BaseRowReorderedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: dto.requestId ?? null,
rowId: dto.rowId,
position: dto.position,
};
this.eventEmitter.emit(EventName.BASE_ROW_REORDERED, event);
}
// --- private helpers ------------------------------------------------
/*
 * Parses and validates the request's filter tree. Returns undefined when
 * no filter was supplied. Throws a 400 carrying the Zod issues when the
 * shape is wrong, or the validator's message when the tree is
 * structurally valid but semantically rejected by `validateFilterTree`.
 */
private normaliseFilter(dto: ListRowsDto): FilterNode | undefined {
  if (!dto.filter) {
    return undefined;
  }
  const result = filterGroupSchema.safeParse(dto.filter);
  if (!result.success) {
    throw new BadRequestException({
      message: 'Invalid filter tree',
      issues: result.error.issues,
    });
  }
  const tree = result.data;
  try {
    validateFilterTree(tree);
  } catch (err) {
    throw new BadRequestException((err as Error).message);
  }
  return tree;
}
/*
 * Parses the request's search spec. `null`/`undefined` mean "no search";
 * anything else must satisfy `searchSchema` or the request is rejected
 * with a 400 carrying the Zod issues.
 */
private normaliseSearch(raw: unknown): SearchSpec | undefined {
  if (raw == null) {
    return undefined;
  }
  const result = searchSchema.safeParse(raw);
  if (result.success) {
    return result.data;
  }
  throw new BadRequestException({
    message: 'Invalid search spec',
    issues: result.error.issues,
  });
}
private validateCells(
@@ -3,15 +3,25 @@ import {
Injectable,
NotFoundException,
} from '@nestjs/common';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
import { CreateViewDto } from '../dto/create-view.dto';
import { UpdateViewDto, DeleteViewDto } from '../dto/update-view.dto';
import { viewConfigSchema } from '../base.schemas';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { EventName } from '../../../common/events/event.contants';
import {
BaseViewCreatedEvent,
BaseViewDeletedEvent,
BaseViewUpdatedEvent,
} from '../events/base-events';
@Injectable()
export class BaseViewService {
constructor(private readonly baseViewRepo: BaseViewRepo) {}
constructor(
private readonly baseViewRepo: BaseViewRepo,
private readonly eventEmitter: EventEmitter2,
) {}
async create(userId: string, workspaceId: string, dto: CreateViewDto) {
let validatedConfig = {};
@@ -26,10 +36,12 @@ export class BaseViewService {
validatedConfig = result.data;
}
const lastPosition = await this.baseViewRepo.getLastPosition(dto.baseId);
const lastPosition = await this.baseViewRepo.getLastPosition(dto.baseId, {
workspaceId,
});
const position = generateJitteredKeyBetween(lastPosition, null);
return this.baseViewRepo.insertView({
const created = await this.baseViewRepo.insertView({
baseId: dto.baseId,
name: dto.name,
type: dto.type ?? 'table',
@@ -38,10 +50,21 @@ export class BaseViewService {
workspaceId,
creatorId: userId,
});
const event: BaseViewCreatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId,
requestId: null,
view: created,
};
this.eventEmitter.emit(EventName.BASE_VIEW_CREATED, event);
return created;
}
async update(dto: UpdateViewDto) {
const view = await this.baseViewRepo.findById(dto.viewId);
async update(dto: UpdateViewDto, workspaceId: string, userId?: string) {
const view = await this.baseViewRepo.findById(dto.viewId, { workspaceId });
if (!view) {
throw new NotFoundException('View not found');
}
@@ -62,17 +85,36 @@ export class BaseViewService {
validatedConfig = result.data;
}
await this.baseViewRepo.updateView(dto.viewId, {
...(dto.name !== undefined && { name: dto.name }),
...(dto.type !== undefined && { type: dto.type }),
...(validatedConfig !== undefined && { config: validatedConfig as any }),
await this.baseViewRepo.updateView(
dto.viewId,
{
...(dto.name !== undefined && { name: dto.name }),
...(dto.type !== undefined && { type: dto.type }),
...(validatedConfig !== undefined && { config: validatedConfig as any }),
},
{ workspaceId },
);
const updated = await this.baseViewRepo.findById(dto.viewId, {
workspaceId,
});
return this.baseViewRepo.findById(dto.viewId);
if (updated) {
const event: BaseViewUpdatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: null,
view: updated,
};
this.eventEmitter.emit(EventName.BASE_VIEW_UPDATED, event);
}
return updated;
}
async delete(dto: DeleteViewDto) {
const view = await this.baseViewRepo.findById(dto.viewId);
async delete(dto: DeleteViewDto, workspaceId: string, userId?: string) {
const view = await this.baseViewRepo.findById(dto.viewId, { workspaceId });
if (!view) {
throw new NotFoundException('View not found');
}
@@ -81,15 +123,26 @@ export class BaseViewService {
throw new BadRequestException('View does not belong to this base');
}
const viewCount = await this.baseViewRepo.countByBaseId(dto.baseId);
const viewCount = await this.baseViewRepo.countByBaseId(dto.baseId, {
workspaceId,
});
if (viewCount <= 1) {
throw new BadRequestException('Cannot delete the last view');
}
await this.baseViewRepo.deleteView(dto.viewId);
await this.baseViewRepo.deleteView(dto.viewId, { workspaceId });
const event: BaseViewDeletedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: null,
viewId: dto.viewId,
};
this.eventEmitter.emit(EventName.BASE_VIEW_DELETED, event);
}
async listByBaseId(baseId: string) {
return this.baseViewRepo.findByBaseId(baseId);
async listByBaseId(baseId: string, workspaceId: string) {
return this.baseViewRepo.findByBaseId(baseId, { workspaceId });
}
}
@@ -61,7 +61,7 @@ export class BaseService {
workspaceId,
creatorId: userId,
},
trx,
{ trx },
);
return this.baseRepo.findById(base.id, {
@@ -0,0 +1,35 @@
import { Logger } from '@nestjs/common';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { executeTx } from '@docmost/db/utils';
import { IBaseCellGcJob } from '../../../integrations/queue/constants/queue.interface';
const logger = new Logger('BaseCellGcTask');
/*
* Removes a soft-deleted property's key from every row in the base, then
* hard-deletes the property record. Both operations run inside a single
* transaction — without it, a failure between `removeCellKey` and
* `hardDelete` leaves rows scrubbed while the property row lingers,
* requiring manual cleanup. `removeCellKey` is a single
* `UPDATE ... SET cells = cells - $propId` statement.
*/
/*
 * Scrubs a soft-deleted property's cell key from every row of the base,
 * then hard-deletes the property record, atomically. Keeping both steps
 * inside one transaction guarantees we never end up with rows scrubbed
 * while the property row lingers (or vice versa).
 */
export async function processBaseCellGc(
  db: KyselyDB,
  baseRowRepo: BaseRowRepo,
  basePropertyRepo: BasePropertyRepo,
  data: IBaseCellGcJob,
): Promise<void> {
  const baseId = data.baseId;
  const propertyId = data.propertyId;
  await executeTx(db, async (trx) => {
    await baseRowRepo.removeCellKey(baseId, propertyId, {
      workspaceId: data.workspaceId,
      trx,
    });
    await basePropertyRepo.hardDelete(propertyId, trx);
  });
  logger.log(`cell-gc complete base=${baseId} prop=${propertyId}`);
}
@@ -0,0 +1,203 @@
import { Logger } from '@nestjs/common';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { dbOrTx } from '@docmost/db/utils';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import {
BasePropertyType,
BasePropertyTypeValue,
CellConversionContext,
attemptCellConversion,
} from '../base.schemas';
import { IBaseTypeConversionJob } from '../../../integrations/queue/constants/queue.interface';
const logger = new Logger('BaseTypeConversionTask');
const CHUNK_SIZE = 1000;
/*
* Handles the cell-rewrite side of a property type change on a base.
* Runs per-chunk batched UPDATEs so Node RAM stays flat regardless of row
* count. When the source type stores IDs (select / multiSelect / person /
* file), it resolves to display values before writing — fixing the
* `String(optionId)` bug that the old synchronous path produced.
*
* The `trx` option lets callers run the whole rewrite inside an outer
* transaction. That matters for the inline path in `BasePropertyService`,
* where the cell rewrite + `type` swap + `schema_version` bump must land
* atomically so readers never observe cells written for a type that hasn't
* flipped yet.
*/
export async function processBaseTypeConversion(
  db: KyselyDB,
  baseRowRepo: BaseRowRepo,
  data: IBaseTypeConversionJob,
  opts?: {
    // Invoked after each chunk with the running row count; used by the
    // BullMQ processor to report job progress.
    progress?: (processed: number) => Promise<void> | void;
    // When provided, all reads/writes join this outer transaction (the
    // inline conversion path in BasePropertyService).
    trx?: KyselyTransaction;
  },
): Promise<{ converted: number; cleared: number; total: number }> {
  const {
    baseId,
    propertyId,
    workspaceId,
    fromType,
    toType,
    fromTypeOptions,
    clearMode,
    actorId,
  } = data;
  // NOTE(review): the payload's `toTypeOptions` is not consulted here —
  // `attemptCellConversion` only receives source-side context. Confirm no
  // conversion target needs its options during the rewrite.
  const progress = opts?.progress;
  const trx = opts?.trx;
  const queryDb = dbOrTx(db, trx);
  // Summary counters: total = rows carrying the key, converted = values
  // rewritten, cleared = values nulled out.
  let total = 0;
  let converted = 0;
  let cleared = 0;
  // Only rows whose cell jsonb actually has this property key need
  // rewriting — everything else is already consistent with the new type
  // (empty value → empty value). Skips the full-table scan on bases
  // where the property was only ever set on a few rows.
  for await (const chunk of baseRowRepo.streamByBaseId(baseId, {
    workspaceId,
    chunkSize: CHUNK_SIZE,
    trx,
    withCellKey: propertyId,
  })) {
    // Per-chunk ID → display-value resolution context (users/attachments
    // batch-queried; select choices come from fromTypeOptions).
    const ctx = await buildCtx(
      queryDb,
      chunk,
      propertyId,
      fromType,
      fromTypeOptions,
    );
    const updates: Array<{ id: string; patch: Record<string, unknown> }> = [];
    for (const row of chunk) {
      const cells = (row.cells ?? {}) as Record<string, unknown>;
      // Defensive re-check; the stream is already filtered by
      // `withCellKey`, so this should not skip in practice.
      if (!(propertyId in cells)) continue;
      total++;
      // clearMode (conversions involving system types) nulls every value
      // instead of attempting a conversion.
      if (clearMode) {
        updates.push({ id: row.id, patch: { [propertyId]: null } });
        cleared++;
        continue;
      }
      const result = attemptCellConversion(
        fromType as BasePropertyTypeValue,
        toType as BasePropertyTypeValue,
        cells[propertyId],
        ctx,
      );
      if (result.converted) {
        converted++;
        updates.push({
          id: row.id,
          // Normalise undefined to an explicit null so the jsonb patch
          // always carries the key.
          patch: { [propertyId]: result.value ?? null },
        });
      } else {
        // Unconvertible value: clear it rather than persist garbage under
        // the new type.
        cleared++;
        updates.push({ id: row.id, patch: { [propertyId]: null } });
      }
    }
    if (updates.length > 0) {
      await baseRowRepo.batchUpdateCells(updates, {
        baseId,
        workspaceId,
        actorId,
        trx,
      });
    }
    // Report cumulative progress once per chunk, after its batch lands.
    if (progress) await progress(total);
  }
  logger.log(
    `type-conversion ${fromType}${toType} base=${baseId} prop=${propertyId} total=${total} converted=${converted} cleared=${cleared}`,
  );
  return { converted, cleared, total };
}
/*
* Builds the resolution context for a chunk. For select/multiSelect the
* choice map lives in the property's typeOptions (already in the job
* payload). For person and file, we batch-query the IDs present in this
* chunk.
*/
/*
 * Assembles the ID → display-value lookups `attemptCellConversion` needs
 * for one chunk. select/multiSelect choices already travel in the job
 * payload (`fromTypeOptions`); person and file IDs are resolved with one
 * batched query per chunk.
 */
async function buildCtx(
  db: KyselyDB | KyselyTransaction,
  chunk: Array<{ cells: unknown }>,
  propertyId: string,
  fromType: string,
  fromTypeOptions: unknown,
): Promise<CellConversionContext> {
  const ctx: CellConversionContext = { fromTypeOptions };
  if (fromType === BasePropertyType.PERSON) {
    const userIds = collectIds(chunk, propertyId);
    if (userIds.size > 0) {
      const users = await db
        .selectFrom('users')
        .select(['id', 'name', 'email'])
        .where('id', 'in', Array.from(userIds))
        .execute();
      ctx.userNames = new Map(
        users.map((u) => [u.id, u.name || u.email || '']),
      );
    }
    return ctx;
  }
  if (fromType === BasePropertyType.FILE) {
    const attachmentIds = collectFileIds(chunk, propertyId);
    if (attachmentIds.size > 0) {
      const attachments = await db
        .selectFrom('attachments')
        .select(['id', 'fileName'])
        .where('id', 'in', Array.from(attachmentIds))
        .execute();
      ctx.attachmentNames = new Map(
        attachments.map((a) => [a.id, a.fileName]),
      );
    }
  }
  return ctx;
}
/*
 * Gathers every non-empty string ID stored under `propertyId` across the
 * chunk. Accepts both scalar cells (single-select / person) and array
 * cells (multi-select); anything that is not a non-empty string is
 * ignored.
 */
function collectIds(
  chunk: Array<{ cells: unknown }>,
  propertyId: string,
): Set<string> {
  const ids = new Set<string>();
  const addIfId = (candidate: unknown): void => {
    if (typeof candidate === 'string' && candidate.length > 0) {
      ids.add(candidate);
    }
  };
  for (const row of chunk) {
    const value = (row.cells as Record<string, unknown> | null | undefined)?.[
      propertyId
    ];
    if (value == null) continue;
    if (Array.isArray(value)) {
      value.forEach(addIfId);
    } else {
      addIfId(value);
    }
  }
  return ids;
}
/*
 * Gathers attachment IDs stored under `propertyId` across the chunk.
 * File cells are arrays whose items are either bare ID strings or
 * objects carrying a string `id` field; non-array cells contribute
 * nothing.
 */
function collectFileIds(
  chunk: Array<{ cells: unknown }>,
  propertyId: string,
): Set<string> {
  const ids = new Set<string>();
  for (const row of chunk) {
    const value = (row.cells as Record<string, unknown> | null | undefined)?.[
      propertyId
    ];
    if (!Array.isArray(value)) continue;
    for (const entry of value) {
      if (typeof entry === 'string') {
        if (entry.length > 0) ids.add(entry);
      } else if (entry && typeof entry === 'object') {
        const id = (entry as { id?: unknown }).id;
        if (typeof id === 'string') ids.add(id);
      }
    }
  }
  return ids;
}