feat: sync blocks - wip

This commit is contained in:
Philipinho
2026-05-04 18:08:34 +01:00
parent 17f3158a3b
commit cc343b095a
50 changed files with 4328 additions and 65 deletions
@@ -18,6 +18,7 @@ import { LoggerExtension } from './extensions/logger.extension';
import { CollaborationHandler } from './collaboration.handler';
import { CollabHistoryService } from './services/collab-history.service';
import { WatcherModule } from '../core/watcher/watcher.module';
import { TransclusionService } from '../core/page/transclusion/transclusion.service';
@Module({
providers: [
@@ -28,6 +29,7 @@ import { WatcherModule } from '../core/watcher/watcher.module';
HistoryProcessor,
CollabHistoryService,
CollaborationHandler,
TransclusionService,
],
exports: [CollaborationGateway],
imports: [TokenModule, WatcherModule],
@@ -40,6 +40,8 @@ import {
Status,
addUniqueIdsToDoc,
htmlToMarkdown,
Transclusion,
TransclusionReference,
} from '@docmost/editor-ext';
import { generateText, getSchema, JSONContent } from '@tiptap/core';
import { generateHTML, generateJSON } from '../common/helpers/prosemirror/html';
@@ -101,6 +103,8 @@ export const tiptapExtensions = [
Columns,
Column,
Status,
Transclusion,
TransclusionReference,
] as any;
export function jsonToHtml(tiptapJson: any) {
@@ -32,6 +32,7 @@ import {
HISTORY_FAST_THRESHOLD,
HISTORY_INTERVAL,
} from '../constants';
import { TransclusionService } from '../../core/page/transclusion/transclusion.service';
@Injectable()
export class PersistenceExtension implements Extension {
@@ -45,6 +46,7 @@ export class PersistenceExtension implements Extension {
@InjectQueue(QueueName.HISTORY_QUEUE) private historyQueue: Queue,
@InjectQueue(QueueName.NOTIFICATION_QUEUE) private notificationQueue: Queue,
private readonly collabHistory: CollabHistoryService,
private readonly transclusionService: TransclusionService,
) {}
async onLoadDocument(data: onLoadDocumentPayload) {
@@ -134,7 +136,11 @@ export class PersistenceExtension implements Extension {
try {
const existingContributors = page.contributorIds || [];
contributorIds = Array.from(
new Set([...existingContributors, ...editingUserIds, page.creatorId]),
new Set([
...existingContributors,
...editingUserIds,
page.creatorId,
]),
);
} catch (err) {
//this.logger.debug('Contributors error:' + err?.['message']);
@@ -158,6 +164,10 @@ export class PersistenceExtension implements Extension {
this.logger.error(`Failed to update page ${pageId}`, err);
}
if (page) {
await this.syncTransclusion(pageId, tiptapJson);
}
if (page) {
await this.collabHistory.addContributors(pageId, editingUserIds);
@@ -165,7 +175,9 @@ export class PersistenceExtension implements Extension {
const userMentions = extractUserMentions(mentions);
const oldMentions = page.content ? extractMentions(page.content) : [];
const oldMentionedUserIds = extractUserMentions(oldMentions).map((m) => m.entityId);
const oldMentionedUserIds = extractUserMentions(oldMentions).map(
(m) => m.entityId,
);
if (userMentions.length > 0) {
await this.notificationQueue.add(QueueJob.PAGE_MENTION_NOTIFICATION, {
@@ -229,4 +241,29 @@ export class PersistenceExtension implements Extension {
{ jobId: page.id, delay },
);
}
/**
* Refresh `page_transclusions` and `page_transclusion_references` to match
* the page's current content. Runs outside the page-write transaction and
* isolates each call so a failure here cannot affect the page save itself.
* The diff is idempotent — the next save converges if a round drops anything.
*/
private async syncTransclusion(
pageId: string,
tiptapJson: unknown,
): Promise<void> {
try {
await this.transclusionService.syncPageTransclusions(pageId, tiptapJson);
} catch (err) {
this.logger.error(`Failed to sync transclusions for page ${pageId}`, err);
}
try {
await this.transclusionService.syncPageReferences(pageId, tiptapJson);
} catch (err) {
this.logger.error(
`Failed to sync transclusion references for page ${pageId}`,
err,
);
}
}
}
+2 -1
View File
@@ -6,11 +6,12 @@ import { TrashCleanupService } from './services/trash-cleanup.service';
import { StorageModule } from '../../integrations/storage/storage.module';
import { CollaborationModule } from '../../collaboration/collaboration.module';
import { WatcherModule } from '../watcher/watcher.module';
import { TransclusionModule } from './transclusion/transclusion.module';
@Module({
controllers: [PageController],
providers: [PageService, PageHistoryService, TrashCleanupService],
exports: [PageService, PageHistoryService],
imports: [StorageModule, CollaborationModule, WatcherModule],
imports: [StorageModule, CollaborationModule, WatcherModule, TransclusionModule],
})
export class PageModule {}
@@ -54,6 +54,7 @@ import {
import { markdownToHtml } from '@docmost/editor-ext';
import { WatcherService } from '../../watcher/watcher.service';
import { sql } from 'kysely';
import { TransclusionService } from '../transclusion/transclusion.service';
@Injectable()
export class PageService {
@@ -71,6 +72,7 @@ export class PageService {
private eventEmitter: EventEmitter2,
private collaborationGateway: CollaborationGateway,
private readonly watcherService: WatcherService,
private readonly transclusionService: TransclusionService,
) {}
async findById(
@@ -600,6 +602,17 @@ export class PageService {
}
}
// Remap transclusion-reference source pages to their copies when
// the source page is also being duplicated in the same operation.
if (node.type.name === 'transclusionReference') {
const sourcePageId = node.attrs.sourcePageId;
if (sourcePageId && pageMap.has(sourcePageId)) {
const mappedPage = pageMap.get(sourcePageId);
//@ts-ignore
node.attrs.sourcePageId = mappedPage.newPageId;
}
}
// Update internal page links in link marks
for (const mark of node.marks) {
if (
@@ -659,6 +672,31 @@ export class PageService {
await this.db.insertInto('pages').values(insertablePages).execute();
// Extract transclusions from every duplicated page and persist them in
// one statement. Duplication bypasses Yjs onStoreDocument; brand-new
// pages never have prior rows so we can skip the diff and just bulk-insert.
try {
await this.transclusionService.insertTransclusionsForPages(
insertablePages.map((p) => ({ id: p.id, content: p.content })),
);
} catch (err) {
this.logger.error(
'Failed to insert transclusions for duplicated pages',
err,
);
}
try {
await this.transclusionService.insertReferencesForPages(
insertablePages.map((p) => ({ id: p.id, content: p.content })),
);
} catch (err) {
this.logger.error(
'Failed to insert transclusion references for duplicated pages',
err,
);
}
const insertedPageIds = insertablePages.map((page) => page.id);
this.eventEmitter.emit(EventName.PAGE_CREATED, {
pageIds: insertedPageIds,
@@ -0,0 +1,24 @@
import { Type } from 'class-transformer';
import {
ArrayMaxSize,
IsArray,
IsString,
IsUUID,
ValidateNested,
} from 'class-validator';
// A single reference to resolve: the page that owns the transclusion plus
// the block id inside that page.
export class LookupReferenceDto {
  @IsUUID()
  sourcePageId!: string;

  // Plain string (not @IsUUID): transclusion ids are editor-assigned block
  // ids, not page UUIDs.
  @IsString()
  transclusionId!: string;
}
// Batch lookup payload. Capped at 50 references per request to bound the
// amount of work a single call can trigger.
export class LookupDto {
  @IsArray()
  @ArrayMaxSize(50)
  @ValidateNested({ each: true })
  @Type(() => LookupReferenceDto)
  references!: LookupReferenceDto[];
}
@@ -0,0 +1,9 @@
import { IsString, IsUUID } from 'class-validator';
// Identifies one transclusion (source page + block id) whose referencing
// pages should be listed.
export class ReferencesDto {
  @IsUUID()
  sourcePageId!: string;

  // Editor-assigned block id; validated only as a string, not a UUID.
  @IsString()
  transclusionId!: string;
}
@@ -0,0 +1,12 @@
import { IsString, IsUUID } from 'class-validator';
// Identifies the reference to detach: the page holding the reference node,
// plus the source page and transclusion it currently points at.
export class UnsyncReferenceDto {
  @IsUUID()
  referencePageId!: string;

  @IsUUID()
  sourcePageId!: string;

  // Editor-assigned block id; validated only as a string, not a UUID.
  @IsString()
  transclusionId!: string;
}
@@ -0,0 +1,232 @@
import {
collectReferencesFromPmJson,
collectTransclusionsFromPmJson,
} from '../utils/transclusion-prosemirror.util';
// Unit tests for collectTransclusionsFromPmJson: walking a ProseMirror JSON
// doc and extracting the transclusion (source) nodes it contains.
describe('collectTransclusionsFromPmJson', () => {
  it('returns [] for null/undefined doc', () => {
    expect(collectTransclusionsFromPmJson(null)).toEqual([]);
    expect(collectTransclusionsFromPmJson(undefined)).toEqual([]);
  });

  it('returns [] for a doc with no transclusion nodes', () => {
    const doc = {
      type: 'doc',
      content: [{ type: 'paragraph', content: [{ type: 'text', text: 'hi' }] }],
    };
    expect(collectTransclusionsFromPmJson(doc)).toEqual([]);
  });

  it('extracts a top-level transclusion with id, name and content', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 'abc123', name: 'Pricing' },
          content: [{ type: 'paragraph', content: [{ type: 'text', text: 'Body' }] }],
        },
      ],
    };
    const got = collectTransclusionsFromPmJson(doc);
    expect(got).toHaveLength(1);
    expect(got[0].transclusionId).toBe('abc123');
    expect(got[0].name).toBe('Pricing');
    // The node's children are re-wrapped in a standalone `doc` node.
    expect(got[0].content).toEqual({
      type: 'doc',
      content: [{ type: 'paragraph', content: [{ type: 'text', text: 'Body' }] }],
    });
  });

  it('skips transclusion nodes with no id (transient before UniqueID assigns one)', () => {
    const doc = {
      type: 'doc',
      content: [
        { type: 'transclusion', attrs: {}, content: [{ type: 'paragraph' }] },
      ],
    };
    expect(collectTransclusionsFromPmJson(doc)).toEqual([]);
  });

  it('returns multiple top-level transclusions', () => {
    const doc = {
      type: 'doc',
      content: [
        { type: 'transclusion', attrs: { id: 'a' }, content: [{ type: 'paragraph' }] },
        { type: 'transclusion', attrs: { id: 'b', name: 'Two' }, content: [{ type: 'paragraph' }] },
      ],
    };
    const got = collectTransclusionsFromPmJson(doc);
    expect(got.map((e) => e.transclusionId)).toEqual(['a', 'b']);
  });

  it('does not recurse into a nested transclusion (transclusion cannot contain transclusion per schema, but be defensive)', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 'outer' },
          content: [
            {
              type: 'transclusion',
              attrs: { id: 'inner' },
              content: [{ type: 'paragraph' }],
            },
          ],
        },
      ],
    };
    // Only the outer node is collected; the inner one is treated as content.
    const got = collectTransclusionsFromPmJson(doc);
    expect(got.map((e) => e.transclusionId)).toEqual(['outer']);
  });

  it('finds transclusions nested inside other block containers (e.g. column)', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'column',
          content: [
            { type: 'transclusion', attrs: { id: 'inCol' }, content: [{ type: 'paragraph' }] },
          ],
        },
      ],
    };
    expect(collectTransclusionsFromPmJson(doc).map((e) => e.transclusionId)).toEqual([
      'inCol',
    ]);
  });

  it('uses the last id when duplicate ids appear (later wins, deterministic)', () => {
    const doc = {
      type: 'doc',
      content: [
        { type: 'transclusion', attrs: { id: 'dup', name: 'first' }, content: [{ type: 'paragraph' }] },
        { type: 'transclusion', attrs: { id: 'dup', name: 'second' }, content: [{ type: 'paragraph' }] },
      ],
    };
    // Duplicate ids collapse to one entry whose fields come from the last node.
    const got = collectTransclusionsFromPmJson(doc);
    expect(got).toHaveLength(1);
    expect(got[0].name).toBe('second');
  });
});
// Unit tests for collectReferencesFromPmJson: extracting transclusionReference
// nodes (and the transclusion, if any, that contains them) from a PM doc.
describe('collectReferencesFromPmJson', () => {
  it('returns [] for null/undefined doc', () => {
    expect(collectReferencesFromPmJson(null)).toEqual([]);
    expect(collectReferencesFromPmJson(undefined)).toEqual([]);
  });

  it('returns [] for a doc with no transclusionReference nodes', () => {
    const doc = {
      type: 'doc',
      content: [
        { type: 'paragraph', content: [{ type: 'text', text: 'hi' }] },
      ],
    };
    expect(collectReferencesFromPmJson(doc)).toEqual([]);
  });

  it('extracts a top-level reference', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
        },
      ],
    };
    // A loose (top-level) reference has no containing transclusion → null.
    expect(collectReferencesFromPmJson(doc)).toEqual([
      { containingTransclusionId: null, sourcePageId: 'p1', transclusionId: 'e1' },
    ]);
  });

  it('skips references missing sourcePageId or transclusionId', () => {
    const doc = {
      type: 'doc',
      content: [
        { type: 'transclusionReference', attrs: { transclusionId: 'e1' } },
        { type: 'transclusionReference', attrs: { sourcePageId: 'p1' } },
        { type: 'transclusionReference', attrs: {} },
      ],
    };
    expect(collectReferencesFromPmJson(doc)).toEqual([]);
  });

  it('finds references nested in other block containers (column, callout, etc.)', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'column',
          content: [
            {
              type: 'transclusionReference',
              attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
            },
          ],
        },
        {
          type: 'callout',
          content: [
            {
              type: 'transclusionReference',
              attrs: { sourcePageId: 'p2', transclusionId: 'e2' },
            },
          ],
        },
      ],
    };
    // Ordinary containers do not count as a "containing transclusion".
    expect(collectReferencesFromPmJson(doc)).toEqual([
      { containingTransclusionId: null, sourcePageId: 'p1', transclusionId: 'e1' },
      { containingTransclusionId: null, sourcePageId: 'p2', transclusionId: 'e2' },
    ]);
  });

  it('also finds references nested inside a transclusion (source) node', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 'src1' },
          content: [
            {
              type: 'transclusionReference',
              attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
            },
          ],
        },
      ],
    };
    // The enclosing transclusion's id is recorded as containingTransclusionId.
    expect(collectReferencesFromPmJson(doc)).toEqual([
      { containingTransclusionId: 'src1', sourcePageId: 'p1', transclusionId: 'e1' },
    ]);
  });

  it('dedupes identical (containingTransclusionId, sourcePageId, transclusionId) triples', () => {
    const doc = {
      type: 'doc',
      content: [
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
        },
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
        },
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p2', transclusionId: 'e2' },
        },
      ],
    };
    expect(collectReferencesFromPmJson(doc)).toEqual([
      { containingTransclusionId: null, sourcePageId: 'p1', transclusionId: 'e1' },
      { containingTransclusionId: null, sourcePageId: 'p2', transclusionId: 'e2' },
    ]);
  });
});
@@ -0,0 +1,161 @@
import {
rewriteAttachmentsForUnsync,
type AttachmentRewritePlan,
} from '../utils/transclusion-unsync.util';
// Unit tests for rewriteAttachmentsForUnsync: producing a copy of PM content
// whose attachment nodes point at freshly generated attachment ids, plus a
// plan (old id → new id) of the attachment copies to perform.
describe('rewriteAttachmentsForUnsync', () => {
  // Factory of deterministic id generators. Each call to fixedIds() returns a
  // fresh generator whose counter starts at 1, so every test sees 'new-1',
  // 'new-2', … regardless of test ordering.
  const fixedIds = (() => {
    let i = 0;
    return () => `new-${++i}`;
  });

  it('returns content unchanged when no attachment nodes are present', () => {
    const content = {
      type: 'doc',
      content: [
        { type: 'paragraph', content: [{ type: 'text', text: 'hello' }] },
      ],
    };
    const r = rewriteAttachmentsForUnsync(content, fixedIds());
    expect(r.content).toEqual(content);
    expect(r.copies).toEqual([]);
  });

  it('rewrites attachmentId and src on a single image node', () => {
    const oldId = '11111111-1111-1111-1111-111111111111';
    const content = {
      type: 'doc',
      content: [
        {
          type: 'image',
          attrs: {
            attachmentId: oldId,
            src: `/api/files/${oldId}/cat.png`,
          },
        },
      ],
    };
    const gen = fixedIds();
    const r = rewriteAttachmentsForUnsync(content, gen);
    expect(r.copies).toHaveLength(1);
    const plan: AttachmentRewritePlan = r.copies[0];
    expect(plan.oldAttachmentId).toBe(oldId);
    expect(plan.newAttachmentId).toBe('new-1');
    // Both the id attr and the id embedded in the src path are rewritten.
    const img = (r.content as any).content[0];
    expect(img.attrs.attachmentId).toBe('new-1');
    expect(img.attrs.src).toBe('/api/files/new-1/cat.png');
  });

  it('rewrites every attachment node type (image, video, audio, attachment, drawio, excalidraw, pdf)', () => {
    const types = [
      'image',
      'video',
      'audio',
      'attachment',
      'drawio',
      'excalidraw',
      'pdf',
    ] as const;
    const content = {
      type: 'doc',
      content: types.map((t, i) => ({
        type: t,
        attrs: {
          attachmentId: `old-${i}`,
          src: `/api/files/old-${i}/file`,
        },
      })),
    };
    const r = rewriteAttachmentsForUnsync(content, fixedIds());
    expect(r.copies).toHaveLength(types.length);
    expect((r.content as any).content.map((n: any) => n.attrs.attachmentId)).toEqual(
      Array.from({ length: types.length }, (_, i) => `new-${i + 1}`),
    );
  });

  it('reuses one new id per old attachmentId across nodes (dedupe)', () => {
    const shared = 'shared-old';
    const content = {
      type: 'doc',
      content: [
        {
          type: 'image',
          attrs: {
            attachmentId: shared,
            src: `/api/files/${shared}/a.png`,
          },
        },
        {
          type: 'image',
          attrs: {
            attachmentId: shared,
            src: `/api/files/${shared}/a.png`,
          },
        },
      ],
    };
    const r = rewriteAttachmentsForUnsync(content, fixedIds());
    // One copy plan, and both nodes point at the same new id.
    expect(r.copies).toHaveLength(1);
    expect(r.copies[0].oldAttachmentId).toBe(shared);
    const newId = r.copies[0].newAttachmentId;
    expect((r.content as any).content[0].attrs.attachmentId).toBe(newId);
    expect((r.content as any).content[1].attrs.attachmentId).toBe(newId);
  });

  it('does not mutate the input content object', () => {
    const content = {
      type: 'doc',
      content: [
        {
          type: 'image',
          attrs: { attachmentId: 'old-x', src: '/api/files/old-x/x.png' },
        },
      ],
    };
    const snapshot = JSON.parse(JSON.stringify(content));
    rewriteAttachmentsForUnsync(content, fixedIds());
    expect(content).toEqual(snapshot);
  });

  // Renamed from "missing or not a uuid-shaped string": other tests in this
  // suite ('old-0', 'shared-old', 'old-nested') show that non-UUID ids ARE
  // rewritten, so only missing/empty ids are actually skipped.
  it('skips nodes whose attachmentId is missing or empty', () => {
    const content = {
      type: 'doc',
      content: [
        { type: 'image', attrs: {} },
        { type: 'image', attrs: { attachmentId: '' } },
      ],
    };
    const r = rewriteAttachmentsForUnsync(content, fixedIds());
    expect(r.copies).toEqual([]);
    expect(r.content).toEqual(content);
  });

  it('recurses into nested containers (column, callout)', () => {
    const oldId = 'old-nested';
    const content = {
      type: 'doc',
      content: [
        {
          type: 'callout',
          content: [
            {
              type: 'image',
              attrs: {
                attachmentId: oldId,
                src: `/api/files/${oldId}/x.png`,
              },
            },
          ],
        },
      ],
    };
    const r = rewriteAttachmentsForUnsync(content, fixedIds());
    expect(r.copies).toHaveLength(1);
    const newId = r.copies[0].newAttachmentId;
    const inner = (r.content as any).content[0].content[0];
    expect(inner.attrs.attachmentId).toBe(newId);
    expect(inner.attrs.src).toBe(`/api/files/${newId}/x.png`);
  });
});
@@ -0,0 +1,78 @@
import { Test } from '@nestjs/testing';
import { TransclusionController } from '../transclusion.controller';
import { TransclusionService } from '../transclusion.service';
import { JwtAuthGuard } from '../../../../common/guards/jwt-auth.guard';
// Controller-level tests for the lookup endpoint. The JwtAuthGuard is
// overridden so requests pass without a real JWT, and the service is fully
// mocked — these tests only verify the controller's pass-through behavior.
describe('TransclusionController.lookup', () => {
  let controller: TransclusionController;
  let service: jest.Mocked<TransclusionService>;

  beforeEach(async () => {
    service = {
      lookup: jest.fn(),
      listReferences: jest.fn(),
      unsyncReference: jest.fn(),
    } as any;
    const module = await Test.createTestingModule({
      controllers: [TransclusionController],
      providers: [{ provide: TransclusionService, useValue: service }],
    })
      .overrideGuard(JwtAuthGuard)
      .useValue({ canActivate: () => true })
      .compile();
    controller = module.get(TransclusionController);
  });

  const user = { id: 'u1' } as any;
  const ref = { sourcePageId: 'p1', transclusionId: 'e1' };

  it('returns content when lookup succeeds', async () => {
    service.lookup.mockResolvedValue({
      items: [
        {
          sourcePageId: 'p1',
          transclusionId: 'e1',
          content: { type: 'doc' },
          sourceUpdatedAt: new Date(),
        },
      ],
    } as any);
    const out = await controller.lookup({ references: [ref] } as any, user);
    // Successful items carry content and no status discriminator.
    expect(out.items[0]).not.toHaveProperty('status');
    expect((out.items[0] as any).content).toEqual({ type: 'doc' });
    // The controller forwards the references array and the viewer's user id.
    expect(service.lookup).toHaveBeenCalledWith([ref], 'u1');
  });

  it('returns no_access when service says no_access', async () => {
    service.lookup.mockResolvedValue({
      items: [
        {
          sourcePageId: 'p1',
          transclusionId: 'e1',
          status: 'no_access',
        },
      ],
    } as any);
    const out = await controller.lookup({ references: [ref] } as any, user);
    expect((out.items[0] as { status?: string }).status).toBe('no_access');
  });

  it('returns not_found when service says not_found', async () => {
    service.lookup.mockResolvedValue({
      items: [
        {
          sourcePageId: 'p1',
          transclusionId: 'e1',
          status: 'not_found',
        },
      ],
    } as any);
    const out = await controller.lookup({ references: [ref] } as any, user);
    expect((out.items[0] as { status?: string }).status).toBe('not_found');
  });
});
@@ -0,0 +1,412 @@
import { Test } from '@nestjs/testing';
import { TransclusionService } from '../transclusion.service';
import { PageTransclusionsRepo } from '@docmost/db/repos/page-transclusions/page-transclusions.repo';
import { PageTransclusionReferencesRepo } from '@docmost/db/repos/page-transclusion-references/page-transclusion-references.repo';
import { PageRepo } from '@docmost/db/repos/page/page.repo';
import { PagePermissionRepo } from '@docmost/db/repos/page/page-permission.repo';
import { AttachmentRepo } from '@docmost/db/repos/attachment/attachment.repo';
import { StorageService } from '../../../../integrations/storage/storage.service';
// Tests for the transclusion-side diff: desired state is computed from the PM
// doc, existing state comes from the mocked repo, and the service is expected
// to emit the minimal insert/update/delete set.
describe('TransclusionService.syncPageTransclusions', () => {
  let service: TransclusionService;
  let repo: jest.Mocked<PageTransclusionsRepo>;

  beforeEach(async () => {
    const mockRepo: jest.Mocked<Partial<PageTransclusionsRepo>> = {
      findByPageId: jest.fn(),
      insert: jest.fn(),
      update: jest.fn(),
      deleteByPageAndTransclusionIds: jest.fn(),
    };
    // Only PageTransclusionsRepo is exercised here; the remaining providers
    // are empty stubs to satisfy the service's constructor.
    const module = await Test.createTestingModule({
      providers: [
        TransclusionService,
        { provide: PageTransclusionsRepo, useValue: mockRepo },
        { provide: PageTransclusionReferencesRepo, useValue: {} },
        { provide: PageRepo, useValue: {} },
        { provide: PagePermissionRepo, useValue: {} },
        { provide: AttachmentRepo, useValue: {} },
        { provide: StorageService, useValue: {} },
      ],
    }).compile();
    service = module.get(TransclusionService);
    repo = module.get(PageTransclusionsRepo);
  });

  const pageId = '00000000-0000-0000-0000-000000000001';

  it('inserts new transclusions that did not exist before', async () => {
    repo.findByPageId.mockResolvedValue([]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 'a', name: 'Hello' },
          content: [{ type: 'paragraph' }],
        },
      ],
    };
    const result = await service.syncPageTransclusions(pageId, pm);
    expect(result).toEqual({ inserted: 1, updated: 0, deleted: 0 });
    expect(repo.insert).toHaveBeenCalledTimes(1);
    // Second arg is the (absent) transaction — undefined outside a trx.
    expect(repo.insert).toHaveBeenCalledWith(
      expect.objectContaining({
        pageId,
        transclusionId: 'a',
        name: 'Hello',
      }),
      undefined,
    );
    expect(repo.update).not.toHaveBeenCalled();
    expect(repo.deleteByPageAndTransclusionIds).not.toHaveBeenCalled();
  });

  it('updates transclusions whose name or content changed', async () => {
    repo.findByPageId.mockResolvedValue([
      {
        id: 'row1',
        pageId,
        transclusionId: 'a',
        name: 'Old',
        content: { type: 'doc', content: [{ type: 'paragraph' }] },
        createdAt: new Date(),
        updatedAt: new Date(),
      } as any,
    ]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 'a', name: 'New' },
          content: [
            { type: 'paragraph', content: [{ type: 'text', text: 'X' }] },
          ],
        },
      ],
    };
    const result = await service.syncPageTransclusions(pageId, pm);
    expect(result).toEqual({ inserted: 0, updated: 1, deleted: 0 });
    expect(repo.update).toHaveBeenCalledWith(
      pageId,
      'a',
      expect.objectContaining({ name: 'New' }),
      undefined,
    );
  });

  it('skips update when name and content are unchanged', async () => {
    const sameContent = {
      type: 'doc',
      content: [{ type: 'paragraph' }],
    };
    repo.findByPageId.mockResolvedValue([
      {
        id: 'row1',
        pageId,
        transclusionId: 'a',
        name: 'Same',
        content: sameContent,
        createdAt: new Date(),
        updatedAt: new Date(),
      } as any,
    ]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 'a', name: 'Same' },
          content: sameContent.content,
        },
      ],
    };
    const result = await service.syncPageTransclusions(pageId, pm);
    expect(result).toEqual({ inserted: 0, updated: 0, deleted: 0 });
    expect(repo.update).not.toHaveBeenCalled();
  });

  it('deletes transclusions that no longer appear in the doc', async () => {
    repo.findByPageId.mockResolvedValue([
      {
        id: 'r',
        pageId,
        transclusionId: 'gone',
        name: null,
        content: { type: 'doc', content: [] },
        createdAt: new Date(),
        updatedAt: new Date(),
      } as any,
    ]);
    const pm = { type: 'doc', content: [{ type: 'paragraph' }] };
    const result = await service.syncPageTransclusions(pageId, pm);
    expect(result).toEqual({ inserted: 0, updated: 0, deleted: 1 });
    expect(repo.deleteByPageAndTransclusionIds).toHaveBeenCalledWith(
      pageId,
      ['gone'],
      undefined,
    );
  });

  it('handles empty doc → noop', async () => {
    repo.findByPageId.mockResolvedValue([]);
    const result = await service.syncPageTransclusions(pageId, null);
    expect(result).toEqual({ inserted: 0, updated: 0, deleted: 0 });
    expect(repo.insert).not.toHaveBeenCalled();
    expect(repo.update).not.toHaveBeenCalled();
    expect(repo.deleteByPageAndTransclusionIds).not.toHaveBeenCalled();
  });

  it('passes through the trx parameter to repo calls', async () => {
    repo.findByPageId.mockResolvedValue([]);
    const trx = { mock: 'trx' } as any;
    const pm = {
      type: 'doc',
      content: [
        { type: 'transclusion', attrs: { id: 'a' }, content: [{ type: 'paragraph' }] },
      ],
    };
    await service.syncPageTransclusions(pageId, pm, trx);
    expect(repo.findByPageId).toHaveBeenCalledWith(pageId, trx);
    expect(repo.insert).toHaveBeenCalledWith(expect.anything(), trx);
  });
});
// Tests for the reference-side diff, including cycle handling: edges that
// would close a transclusion cycle are deleted rather than inserted.
describe('TransclusionService.syncPageReferences', () => {
  let service: TransclusionService;
  let refRepo: jest.Mocked<PageTransclusionReferencesRepo>;

  beforeEach(async () => {
    const mockTransclusionsRepo: Partial<PageTransclusionsRepo> = {};
    const mockRefRepo: jest.Mocked<Partial<PageTransclusionReferencesRepo>> = {
      findByReferencePageId: jest.fn(),
      insertMany: jest.fn(),
      deleteByReferenceAndKeys: jest.fn(),
      // Default: no cyclic edges reported.
      findCyclicEdgesForSource: jest.fn().mockResolvedValue([]),
      deleteByIds: jest.fn(),
    };
    const module = await Test.createTestingModule({
      providers: [
        TransclusionService,
        { provide: PageTransclusionsRepo, useValue: mockTransclusionsRepo },
        { provide: PageTransclusionReferencesRepo, useValue: mockRefRepo },
        { provide: PageRepo, useValue: {} },
        { provide: PagePermissionRepo, useValue: {} },
        { provide: AttachmentRepo, useValue: {} },
        { provide: StorageService, useValue: {} },
      ],
    }).compile();
    service = module.get(TransclusionService);
    refRepo = module.get(PageTransclusionReferencesRepo);
  });

  const referencePageId = '00000000-0000-0000-0000-000000000001';

  it('inserts new loose references, no deletes when none existed', async () => {
    refRepo.findByReferencePageId.mockResolvedValue([]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
        },
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p2', transclusionId: 'e2' },
        },
      ],
    };
    const result = await service.syncPageReferences(referencePageId, pm);
    expect(result).toEqual({ inserted: 2, deleted: 0 });
    expect(refRepo.insertMany).toHaveBeenCalledWith(
      [
        {
          referencePageId,
          containingTransclusionId: null,
          sourcePageId: 'p1',
          transclusionId: 'e1',
        },
        {
          referencePageId,
          containingTransclusionId: null,
          sourcePageId: 'p2',
          transclusionId: 'e2',
        },
      ],
      undefined,
    );
    expect(refRepo.deleteByReferenceAndKeys).not.toHaveBeenCalled();
    // Loose references never seed cycle detection.
    expect(refRepo.findCyclicEdgesForSource).not.toHaveBeenCalled();
  });

  it('records the containing transclusion when references nest in a source', async () => {
    refRepo.findByReferencePageId.mockResolvedValue([]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 's1' },
          content: [
            {
              type: 'transclusionReference',
              attrs: { sourcePageId: 'p2', transclusionId: 'e2' },
            },
          ],
        },
      ],
    };
    const result = await service.syncPageReferences(referencePageId, pm);
    expect(result).toEqual({ inserted: 1, deleted: 0 });
    expect(refRepo.insertMany).toHaveBeenCalledWith(
      [
        {
          referencePageId,
          containingTransclusionId: 's1',
          sourcePageId: 'p2',
          transclusionId: 'e2',
        },
      ],
      undefined,
    );
    // A reference nested inside a source transclusion triggers cycle checks.
    expect(refRepo.findCyclicEdgesForSource).toHaveBeenCalledWith(
      'p2',
      'e2',
      undefined,
    );
  });

  it('deletes edges that close a cycle and excludes them from the inserted count', async () => {
    refRepo.findByReferencePageId.mockResolvedValue([]);
    refRepo.findCyclicEdgesForSource.mockResolvedValue([
      {
        id: 'closing-edge-id',
        referencePageId,
        containingTransclusionId: 's1',
        sourcePageId: 'p2',
        transclusionId: 'e2',
        createdAt: new Date(),
      } as any,
    ]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusion',
          attrs: { id: 's1' },
          content: [
            {
              type: 'transclusionReference',
              attrs: { sourcePageId: 'p2', transclusionId: 'e2' },
            },
          ],
        },
      ],
    };
    const result = await service.syncPageReferences(referencePageId, pm);
    // The cyclic edge is neither counted as inserted nor as deleted.
    expect(result).toEqual({ inserted: 0, deleted: 0 });
    expect(refRepo.deleteByIds).toHaveBeenCalledWith(
      ['closing-edge-id'],
      undefined,
    );
  });

  it('deletes references that no longer appear', async () => {
    refRepo.findByReferencePageId.mockResolvedValue([
      {
        id: 'r1',
        referencePageId,
        containingTransclusionId: null,
        sourcePageId: 'p1',
        transclusionId: 'e1',
        createdAt: new Date(),
      } as any,
    ]);
    const pm = { type: 'doc', content: [{ type: 'paragraph' }] };
    const result = await service.syncPageReferences(referencePageId, pm);
    expect(result).toEqual({ inserted: 0, deleted: 1 });
    expect(refRepo.deleteByReferenceAndKeys).toHaveBeenCalledWith(
      referencePageId,
      [
        {
          containingTransclusionId: null,
          sourcePageId: 'p1',
          transclusionId: 'e1',
        },
      ],
      undefined,
    );
    expect(refRepo.insertMany).not.toHaveBeenCalled();
  });

  it('is a no-op when desired matches existing exactly', async () => {
    refRepo.findByReferencePageId.mockResolvedValue([
      {
        id: 'r',
        referencePageId,
        containingTransclusionId: null,
        sourcePageId: 'p1',
        transclusionId: 'e1',
        createdAt: new Date(),
      } as any,
    ]);
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
        },
      ],
    };
    const result = await service.syncPageReferences(referencePageId, pm);
    expect(result).toEqual({ inserted: 0, deleted: 0 });
    expect(refRepo.insertMany).not.toHaveBeenCalled();
    expect(refRepo.deleteByReferenceAndKeys).not.toHaveBeenCalled();
  });

  it('passes through trx parameter to repo calls', async () => {
    refRepo.findByReferencePageId.mockResolvedValue([]);
    const trx = { mock: 'trx' } as any;
    const pm = {
      type: 'doc',
      content: [
        {
          type: 'transclusionReference',
          attrs: { sourcePageId: 'p1', transclusionId: 'e1' },
        },
      ],
    };
    await service.syncPageReferences(referencePageId, pm, trx);
    expect(refRepo.findByReferencePageId).toHaveBeenCalledWith(
      referencePageId,
      trx,
    );
    expect(refRepo.insertMany).toHaveBeenCalledWith(expect.anything(), trx);
  });
});
@@ -0,0 +1,57 @@
import {
Body,
Controller,
HttpCode,
HttpStatus,
Post,
UseGuards,
} from '@nestjs/common';
import { JwtAuthGuard } from '../../../common/guards/jwt-auth.guard';
import { AuthUser } from '../../../common/decorators/auth-user.decorator';
import { User } from '@docmost/db/types/entity.types';
import { TransclusionService } from './transclusion.service';
import { LookupDto } from './dto/lookup.dto';
import { ReferencesDto } from './dto/references.dto';
import { UnsyncReferenceDto } from './dto/unsync-reference.dto';
@UseGuards(JwtAuthGuard)
@Controller('pages/transclusion')
export class TransclusionController {
  constructor(private readonly transclusionService: TransclusionService) {}

  /**
   * Resolve a batch of transclusion references to their current source
   * content. Access filtering happens in the service, keyed by viewer id.
   */
  @HttpCode(HttpStatus.OK)
  @Post('lookup')
  async lookup(@Body() dto: LookupDto, @AuthUser() user: User) {
    const viewerId = user?.id ?? null;
    return this.transclusionService.lookup(dto.references, viewerId);
  }

  /** List the pages that reference the given source transclusion. */
  @HttpCode(HttpStatus.OK)
  @Post('references')
  async references(@Body() dto: ReferencesDto, @AuthUser() user: User) {
    const { sourcePageId, transclusionId } = dto;
    return this.transclusionService.listReferences({
      sourcePageId,
      transclusionId,
      viewerUserId: user.id,
    });
  }

  /** Detach one reference on a page from its source transclusion. */
  @HttpCode(HttpStatus.OK)
  @Post('unsync-reference')
  async unsyncReference(@Body() dto: UnsyncReferenceDto, @AuthUser() user: User) {
    const { referencePageId, sourcePageId, transclusionId } = dto;
    return this.transclusionService.unsyncReference(
      referencePageId,
      sourcePageId,
      transclusionId,
      user.id,
    );
  }
}
@@ -0,0 +1,12 @@
import { Module } from '@nestjs/common';
import { TransclusionController } from './transclusion.controller';
import { TransclusionService } from './transclusion.service';
import { StorageModule } from '../../../integrations/storage/storage.module';
// Transclusion feature module: registers the HTTP controller and exports
// TransclusionService so other modules (e.g. PageModule) can inject it.
// StorageModule supplies the StorageService that TransclusionService injects.
@Module({
  imports: [StorageModule],
  controllers: [TransclusionController],
  providers: [TransclusionService],
  exports: [TransclusionService],
})
export class TransclusionModule {}
@@ -0,0 +1,526 @@
import {
Injectable,
Logger,
ForbiddenException,
NotFoundException,
} from '@nestjs/common';
import { isDeepStrictEqual } from 'node:util';
import { v7 as uuid7 } from 'uuid';
import { KyselyTransaction } from '@docmost/db/types/kysely.types';
import { PageTransclusionsRepo } from '@docmost/db/repos/page-transclusions/page-transclusions.repo';
import { PageTransclusionReferencesRepo } from '@docmost/db/repos/page-transclusion-references/page-transclusion-references.repo';
import { PageRepo } from '@docmost/db/repos/page/page.repo';
import { PagePermissionRepo } from '@docmost/db/repos/page/page-permission.repo';
import { AttachmentRepo } from '@docmost/db/repos/attachment/attachment.repo';
import { StorageService } from '../../../integrations/storage/storage.service';
import {
collectReferencesFromPmJson,
collectTransclusionsFromPmJson,
} from './utils/transclusion-prosemirror.util';
import { rewriteAttachmentsForUnsync } from './utils/transclusion-unsync.util';
import { TransclusionLookup } from './transclusion.types';
import { Page } from '@docmost/db/types/entity.types';
// Minimal page descriptor returned by listReferences: enough for the client
// to render a link (slugId + spaceSlug) with a label (title/icon).
type ReferencingPageInfo = {
  id: string;
  slugId: string;
  title: string | null;
  icon: string | null;
  spaceId: string;
  // null when the joined space row carried no slug (e.g. not loaded).
  spaceSlug: string | null;
};
@Injectable()
export class TransclusionService {
  private readonly logger = new Logger(TransclusionService.name);

  constructor(
    private readonly pageTransclusionsRepo: PageTransclusionsRepo,
    private readonly pageTransclusionReferencesRepo: PageTransclusionReferencesRepo,
    private readonly pageRepo: PageRepo,
    private readonly pagePermissionRepo: PagePermissionRepo,
    private readonly attachmentRepo: AttachmentRepo,
    private readonly storageService: StorageService,
  ) {}

  /**
   * Reconcile `page_transclusions` rows with the `transclusion` nodes present
   * in the page's ProseMirror JSON: insert rows for new node ids, update rows
   * whose name/content changed (deep-equality check), delete rows whose node
   * disappeared from the document.
   *
   * @returns per-operation row counts.
   */
  async syncPageTransclusions(
    pageId: string,
    pmJson: unknown,
    trx?: KyselyTransaction,
  ): Promise<{ inserted: number; updated: number; deleted: number }> {
    const desired = collectTransclusionsFromPmJson(pmJson);
    const desiredById = new Map(desired.map((d) => [d.transclusionId, d]));

    const existing = await this.pageTransclusionsRepo.findByPageId(pageId, trx);
    const existingById = new Map(existing.map((e) => [e.transclusionId, e]));

    let inserted = 0;
    let updated = 0;
    let deleted = 0;

    for (const d of desired) {
      const prev = existingById.get(d.transclusionId);
      if (!prev) {
        await this.pageTransclusionsRepo.insert(
          {
            pageId,
            transclusionId: d.transclusionId,
            name: d.name,
            content: d.content as any,
          },
          trx,
        );
        inserted += 1;
        continue;
      }
      // Only write (and bump updated_at) when something actually changed.
      const nameChanged = prev.name !== d.name;
      const contentChanged = !isDeepStrictEqual(prev.content, d.content);
      if (nameChanged || contentChanged) {
        await this.pageTransclusionsRepo.update(
          pageId,
          d.transclusionId,
          { name: d.name, content: d.content as any },
          trx,
        );
        updated += 1;
      }
    }

    // Rows whose transclusion node is no longer in the document.
    const removedIds = existing
      .filter((e) => !desiredById.has(e.transclusionId))
      .map((e) => e.transclusionId);
    if (removedIds.length > 0) {
      await this.pageTransclusionsRepo.deleteByPageAndTransclusionIds(
        pageId,
        removedIds,
        trx,
      );
      deleted = removedIds.length;
    }

    return { inserted, updated, deleted };
  }

  /**
   * Reconcile `page_transclusion_references` rows with the
   * `transclusionReference` nodes present in the page's ProseMirror JSON,
   * keyed by (containingTransclusionId, sourcePageId, transclusionId).
   * Newly introduced edges are then checked for cycles; closing edges are
   * removed (see removeCyclicEdgesIntroducedBy) and subtracted from the
   * reported insert count.
   */
  async syncPageReferences(
    referencePageId: string,
    pmJson: unknown,
    trx?: KyselyTransaction,
  ): Promise<{ inserted: number; deleted: number }> {
    const desired = collectReferencesFromPmJson(pmJson);
    const keyOf = (s: {
      containingTransclusionId: string | null;
      sourcePageId: string;
      transclusionId: string;
    }) =>
      `${s.containingTransclusionId ?? ''}::${s.sourcePageId}::${s.transclusionId}`;
    const desiredKeys = new Set(desired.map(keyOf));

    const existing = await this.pageTransclusionReferencesRepo.findByReferencePageId(
      referencePageId,
      trx,
    );
    const existingKeys = new Set(existing.map(keyOf));

    const toInsert = desired
      .filter((d) => !existingKeys.has(keyOf(d)))
      .map((d) => ({
        referencePageId,
        containingTransclusionId: d.containingTransclusionId,
        sourcePageId: d.sourcePageId,
        transclusionId: d.transclusionId,
      }));
    const toDelete = existing
      .filter((e) => !desiredKeys.has(keyOf(e)))
      .map((e) => ({
        containingTransclusionId: e.containingTransclusionId,
        sourcePageId: e.sourcePageId,
        transclusionId: e.transclusionId,
      }));

    if (toInsert.length > 0) {
      await this.pageTransclusionReferencesRepo.insertMany(toInsert, trx);
    }
    if (toDelete.length > 0) {
      await this.pageTransclusionReferencesRepo.deleteByReferenceAndKeys(
        referencePageId,
        toDelete,
        trx,
      );
    }

    const removedCount = await this.removeCyclicEdgesIntroducedBy(
      toInsert,
      trx,
    );

    return {
      inserted: toInsert.length - removedCount,
      deleted: toDelete.length,
    };
  }

  /**
   * Run cycle detection rooted at each newly-introduced edge's target and
   * delete any closing edge that belongs to a cycle. Lookups for those rows
   * then return `not_found`, which the editor renders as the cycle-aware
   * placeholder. Returns the count of rows removed.
   */
  private async removeCyclicEdgesIntroducedBy(
    candidates: ReadonlyArray<{
      referencePageId: string;
      containingTransclusionId: string | null;
      sourcePageId: string;
      transclusionId: string;
    }>,
    trx?: KyselyTransaction,
  ): Promise<number> {
    // Only nested references (inside a source node) form graph edges; loose
    // page-level references cannot participate in a cycle.
    const seedKeys = new Set<string>();
    const seeds: Array<{ sourcePageId: string; transclusionId: string }> = [];
    for (const c of candidates) {
      if (c.containingTransclusionId === null) continue;
      const key = `${c.sourcePageId}::${c.transclusionId}`;
      if (seedKeys.has(key)) continue;
      seedKeys.add(key);
      seeds.push({
        sourcePageId: c.sourcePageId,
        transclusionId: c.transclusionId,
      });
    }
    if (seeds.length === 0) return 0;

    const offendingIds = new Set<string>();
    for (const seed of seeds) {
      const cyclicEdges =
        await this.pageTransclusionReferencesRepo.findCyclicEdgesForSource(
          seed.sourcePageId,
          seed.transclusionId,
          trx,
        );
      for (const edge of cyclicEdges) offendingIds.add(edge.id);
    }
    if (offendingIds.size === 0) return 0;

    await this.pageTransclusionReferencesRepo.deleteByIds(
      Array.from(offendingIds),
      trx,
    );
    return offendingIds.size;
  }

  /**
   * Extract transclusions from each page's PM JSON and bulk-insert into
   * `page_transclusions` in a single statement. Intended for brand-new pages
   * (e.g. duplication, import) where there is nothing to diff against.
   */
  async insertTransclusionsForPages(
    pages: Array<{ id: string; content: unknown }>,
    trx?: KyselyTransaction,
  ): Promise<{ inserted: number }> {
    const rows: Parameters<PageTransclusionsRepo['insertMany']>[0] = [];
    for (const page of pages) {
      const snapshots = collectTransclusionsFromPmJson(page.content);
      for (const s of snapshots) {
        rows.push({
          pageId: page.id,
          transclusionId: s.transclusionId,
          name: s.name,
          content: s.content as any,
        });
      }
    }
    if (rows.length === 0) return { inserted: 0 };
    await this.pageTransclusionsRepo.insertMany(rows, trx);
    return { inserted: rows.length };
  }

  /**
   * Walk each page's PM JSON for `transclusionReference` nodes and bulk-insert
   * one row per `(containing, source, target)` triple. For brand-new pages
   * (duplication, import) where there is nothing to diff against.
   *
   * Cycle detection runs once per distinct seed source after the bulk insert;
   * any closing edges are removed so lookups return `not_found` and the
   * editor renders the cycle-aware placeholder.
   */
  async insertReferencesForPages(
    pages: Array<{ id: string; content: unknown }>,
    trx?: KyselyTransaction,
  ): Promise<{ inserted: number }> {
    const rows: Array<{
      referencePageId: string;
      containingTransclusionId: string | null;
      sourcePageId: string;
      transclusionId: string;
    }> = [];
    for (const page of pages) {
      const refs = collectReferencesFromPmJson(page.content);
      for (const r of refs) {
        rows.push({
          referencePageId: page.id,
          containingTransclusionId: r.containingTransclusionId,
          sourcePageId: r.sourcePageId,
          transclusionId: r.transclusionId,
        });
      }
    }
    if (rows.length === 0) return { inserted: 0 };
    await this.pageTransclusionReferencesRepo.insertMany(rows, trx);
    const removedCount = await this.removeCyclicEdgesIntroducedBy(rows, trx);
    return { inserted: rows.length - removedCount };
  }

  /**
   * Batch-resolve reference keys to their source content. The result array is
   * positionally aligned with `references`; each slot is one of the
   * TransclusionLookup variants (content, 'not_found', 'no_access').
   *
   * @param viewerUserId null for unauthenticated share viewers, who are
   *   denied access to every candidate page.
   */
  async lookup(
    references: Array<{ sourcePageId: string; transclusionId: string }>,
    viewerUserId: string | null,
  ): Promise<{ items: TransclusionLookup[] }> {
    if (references.length === 0) return { items: [] };

    // Pre-sized result; every index is assigned in the stitch loop below.
    const items: TransclusionLookup[] = new Array(references.length).fill(null);
    const pendingIdx = references.map((_, i) => i);

    // 1) permission filter on the candidate pageIds (auth users only;
    //    unauthenticated share viewers get no_access for any private page).
    const candidatePageIds = Array.from(
      new Set(pendingIdx.map((i) => references[i].sourcePageId)),
    );
    const accessibleSet = viewerUserId
      ? new Set(
          await this.pagePermissionRepo.filterAccessiblePageIds({
            pageIds: candidatePageIds,
            userId: viewerUserId,
          }),
        )
      : new Set<string>();

    // 2) one DB hit for all (page_id, transclusion_id) keys still pending and accessible
    const accessiblePending = pendingIdx.filter((i) =>
      accessibleSet.has(references[i].sourcePageId),
    );
    const rows = await this.pageTransclusionsRepo.findManyByPageAndTransclusion(
      accessiblePending.map((i) => ({
        pageId: references[i].sourcePageId,
        transclusionId: references[i].transclusionId,
      })),
    );
    const rowKey = (r: { pageId: string; transclusionId: string }) =>
      `${r.pageId}::${r.transclusionId}`;
    const rowMap = new Map(rows.map((r) => [rowKey(r), r]));

    // 3) pull updatedAt from each accessible page so we can return
    //    sourceUpdatedAt on each successful result. Fetched in parallel —
    //    these lookups are independent of one another.
    const accessiblePageIds = Array.from(
      new Set(accessiblePending.map((i) => references[i].sourcePageId)),
    );
    const pages = await Promise.all(
      accessiblePageIds.map((pid) => this.pageRepo.findById(pid)),
    );
    const pageMeta = new Map<string, Date>();
    for (const p of pages) {
      if (p && !p.deletedAt) pageMeta.set(p.id, p.updatedAt);
    }

    // 4) stitch the results
    for (const i of pendingIdx) {
      const ref = references[i];
      if (!accessibleSet.has(ref.sourcePageId)) {
        items[i] = {
          sourcePageId: ref.sourcePageId,
          transclusionId: ref.transclusionId,
          status: 'no_access',
        };
        continue;
      }
      const updatedAt = pageMeta.get(ref.sourcePageId);
      if (!updatedAt) {
        // Page deleted or missing entirely.
        items[i] = {
          sourcePageId: ref.sourcePageId,
          transclusionId: ref.transclusionId,
          status: 'not_found',
        };
        continue;
      }
      const row = rowMap.get(`${ref.sourcePageId}::${ref.transclusionId}`);
      if (!row) {
        // Page exists but the sync block was removed (or its edge was
        // deleted by cycle breaking).
        items[i] = {
          sourcePageId: ref.sourcePageId,
          transclusionId: ref.transclusionId,
          status: 'not_found',
        };
        continue;
      }
      items[i] = {
        sourcePageId: ref.sourcePageId,
        transclusionId: ref.transclusionId,
        content: row.content,
        sourceUpdatedAt: updatedAt,
      };
    }

    return { items };
  }

  /**
   * List the pages referencing a given sync block, restricted to pages the
   * viewer can access. `source` is the block's own page (null when the viewer
   * cannot see it or it was deleted); `references` preserves the repo's
   * ordering of referencing pages.
   */
  async listReferences(opts: {
    sourcePageId: string;
    transclusionId: string;
    viewerUserId: string;
  }): Promise<{
    source: ReferencingPageInfo | null;
    references: ReferencingPageInfo[];
  }> {
    const { sourcePageId, transclusionId, viewerUserId } = opts;
    const referencePageIds =
      await this.pageTransclusionReferencesRepo.findReferencePageIdsByTransclusion(
        sourcePageId,
        transclusionId,
      );

    const candidatePageIds = Array.from(
      new Set([sourcePageId, ...referencePageIds]),
    );
    const accessibleSet = new Set(
      await this.pagePermissionRepo.filterAccessiblePageIds({
        pageIds: candidatePageIds,
        userId: viewerUserId,
      }),
    );
    const accessibleIds = candidatePageIds.filter((id) =>
      accessibleSet.has(id),
    );
    if (accessibleIds.length === 0) {
      return { source: null, references: [] };
    }

    const rows = await Promise.all(
      accessibleIds.map((id) =>
        this.pageRepo.findById(id, { includeSpace: true }),
      ),
    );
    const byId = new Map<string, ReferencingPageInfo>();
    for (const p of rows) {
      if (!p || p.deletedAt) continue;
      const space = (p as Page & { space?: { slug?: string } }).space;
      byId.set(p.id, {
        id: p.id,
        slugId: p.slugId,
        title: p.title ?? null,
        icon: p.icon ?? null,
        spaceId: p.spaceId,
        spaceSlug: space?.slug ?? null,
      });
    }

    const source = byId.get(sourcePageId) ?? null;
    const references = referencePageIds
      .map((id) => byId.get(id))
      .filter((p): p is ReferencingPageInfo => Boolean(p));

    return { source, references };
  }

  /**
   * Convert a `transclusionReference` into a self-contained copy on the
   * reference page: load source content, generate fresh attachment ids, copy storage
   * files, insert new attachment rows, return rewritten content. The caller
   * (controller) returns the content blob to the client which then performs
   * `editor.commands.insertContentAt(range, content)` to replace the
   * reference node. The next Yjs save naturally cleans up the
   * page_transclusion_references row, but we also delete it eagerly here so a
   * crash between server response and client save doesn't leave a stale row.
   *
   * @throws NotFoundException when either page or the sync block is missing.
   * @throws ForbiddenException when the viewer lacks access to either page.
   */
  async unsyncReference(
    referencePageId: string,
    sourcePageId: string,
    transclusionId: string,
    viewerUserId: string,
  ): Promise<{ content: unknown }> {
    const referencePage = await this.pageRepo.findById(referencePageId);
    if (!referencePage || referencePage.deletedAt) {
      throw new NotFoundException('Reference page not found');
    }
    const sourcePage = await this.pageRepo.findById(sourcePageId);
    if (!sourcePage || sourcePage.deletedAt) {
      throw new NotFoundException('Source page not found');
    }

    const accessible = new Set(
      await this.pagePermissionRepo.filterAccessiblePageIds({
        pageIds: [referencePageId, sourcePageId],
        userId: viewerUserId,
      }),
    );
    if (!accessible.has(referencePageId) || !accessible.has(sourcePageId)) {
      throw new ForbiddenException();
    }

    const transclusion =
      await this.pageTransclusionsRepo.findByPageAndTransclusion(
        sourcePageId,
        transclusionId,
      );
    if (!transclusion) {
      throw new NotFoundException('Sync block not found');
    }

    const { content, copies } = rewriteAttachmentsForUnsync(
      transclusion.content,
      () => uuid7(),
    );

    if (copies.length > 0) {
      // Set membership instead of Array.includes: keeps the filter O(n + m)
      // instead of O(n * m) over all attachments in the space.
      const oldIds = new Set(copies.map((c) => c.oldAttachmentId));
      const oldRows = await this.attachmentRepo.findBySpaceId(sourcePage.spaceId);
      const byOldId = new Map(
        oldRows
          .filter(
            (a) => oldIds.has(a.id) && a.pageId === sourcePageId,
          )
          .map((a) => [a.id, a]),
      );

      for (const plan of copies) {
        const old = byOldId.get(plan.oldAttachmentId);
        if (!old) continue;

        const newFilePath = old.filePath
          .split(plan.oldAttachmentId)
          .join(plan.newAttachmentId);
        try {
          await this.storageService.copy(old.filePath, newFilePath);
        } catch (err) {
          // Best-effort: a failed file copy skips the row insert for this
          // attachment but does not abort the unsync.
          this.logger.error(
            `unsync: failed to copy attachment ${old.id}`,
            err as Error,
          );
          continue;
        }

        await this.attachmentRepo.insertAttachment({
          id: plan.newAttachmentId,
          type: old.type,
          filePath: newFilePath,
          fileName: old.fileName,
          fileSize: old.fileSize,
          mimeType: old.mimeType,
          fileExt: old.fileExt,
          creatorId: viewerUserId,
          workspaceId: referencePage.workspaceId,
          pageId: referencePageId,
          spaceId: referencePage.spaceId,
        });
      }
    }

    await this.pageTransclusionReferencesRepo.deleteOne(
      referencePageId,
      sourcePageId,
      transclusionId,
    );

    return { content };
  }
}
@@ -0,0 +1,15 @@
// Result of resolving one (sourcePageId, transclusionId) reference.
// Discriminated by the presence/value of `status`:
//  - success: carries the stored content plus the source page's updatedAt;
//  - 'not_found': page or sync block missing/deleted (also used for edges
//    removed by cycle breaking);
//  - 'no_access': viewer cannot read the source page.
export type TransclusionLookup =
  | {
      sourcePageId: string;
      transclusionId: string;
      content: unknown;
      sourceUpdatedAt: Date;
    }
  | { sourcePageId: string; transclusionId: string; status: 'not_found' }
  | { sourcePageId: string; transclusionId: string; status: 'no_access' };
// One `transclusion` node extracted from a page's ProseMirror JSON:
// the node's id, its optional display name (empty string normalized to
// null), and its children wrapped as a standalone `doc` JSON blob.
export type TransclusionNodeSnapshot = {
  transclusionId: string;
  name: string | null;
  content: unknown;
};
@@ -0,0 +1,111 @@
import { TransclusionNodeSnapshot } from '../transclusion.types';
// ProseMirror node type names produced by the editor extensions.
const TRANSCLUSION_TYPE = 'transclusion';
const REFERENCE_TYPE = 'transclusionReference';

export type TransclusionReferenceSnapshot = {
  /**
   * Id of the `transclusion` (source) node whose content holds this reference,
   * or `null` if the reference is loose on the page (not nested inside a source).
   * Used by the cycle-detection CTE to walk source-to-source edges.
   */
  containingTransclusionId: string | null;
  // Page that owns the referenced sync block.
  sourcePageId: string;
  // Id of the referenced `transclusion` node on that page.
  transclusionId: string;
};
/**
 * Extracts one snapshot per `transclusion` node from a ProseMirror JSON
 * document. The walk never descends into a transclusion's children (the
 * schema disallows nested sources); a node's children become the snapshot's
 * `content`, wrapped as a standalone `doc`. Nodes with a missing/empty id
 * (transient editor state) are skipped, and when the same id appears twice
 * the later occurrence overwrites the earlier one, keeping output
 * deterministic.
 */
export function collectTransclusionsFromPmJson(
  doc: unknown,
): TransclusionNodeSnapshot[] {
  if (!doc || typeof doc !== 'object') return [];
  const snapshots = new Map<string, TransclusionNodeSnapshot>();

  const walk = (node: any): void => {
    if (!node || typeof node !== 'object') return;
    if (node.type !== 'transclusion') {
      if (Array.isArray(node.content)) node.content.forEach(walk);
      return;
    }
    const id = node.attrs?.id;
    if (typeof id !== 'string' || id.length === 0) return;
    const rawName = node.attrs?.name;
    snapshots.set(id, {
      transclusionId: id,
      // Empty string normalizes to null.
      name: typeof rawName === 'string' && rawName.length > 0 ? rawName : null,
      content: { type: 'doc', content: node.content ?? [] },
    });
    // Intentionally no recursion into the transclusion's children.
  };

  walk(doc);
  return [...snapshots.values()];
}
/**
 * Extracts one snapshot per unique
 * `(containingTransclusionId, sourcePageId, transclusionId)` triple found on
 * `transclusionReference` nodes in a ProseMirror JSON document. Unlike the
 * transclusion walk, this one descends into every container — including
 * `transclusion` nodes, since a source may reference another source. Output
 * order is first-seen. Reference nodes are atoms: they are never recursed
 * into, and ones with missing/empty attrs are ignored.
 */
export function collectReferencesFromPmJson(
  doc: unknown,
): TransclusionReferenceSnapshot[] {
  if (!doc || typeof doc !== 'object') return [];
  const out: TransclusionReferenceSnapshot[] = [];
  const seen = new Set<string>();

  const walk = (node: any, container: string | null): void => {
    if (!node || typeof node !== 'object') return;

    if (node.type === 'transclusionReference') {
      const sourcePageId = node.attrs?.sourcePageId;
      const transclusionId = node.attrs?.transclusionId;
      const valid =
        typeof sourcePageId === 'string' &&
        sourcePageId.length > 0 &&
        typeof transclusionId === 'string' &&
        transclusionId.length > 0;
      if (!valid) return;
      const key = [container ?? '', sourcePageId, transclusionId].join('::');
      if (seen.has(key)) return;
      seen.add(key);
      out.push({
        containingTransclusionId: container,
        sourcePageId,
        transclusionId,
      });
      return; // atom node - no children
    }

    // Entering a transclusion node switches the "containing" id for its
    // subtree; everything else inherits the current container.
    const nextContainer =
      node.type === 'transclusion' && typeof node.attrs?.id === 'string'
        ? node.attrs.id
        : container;
    if (Array.isArray(node.content)) {
      node.content.forEach((child: any) => walk(child, nextContainer));
    }
  };

  walk(doc, null);
  return out;
}
@@ -0,0 +1,65 @@
import { isAttachmentNode } from '../../../../common/helpers/prosemirror/utils';
// One attachment id replacement decided during the rewrite; the caller copies
// the underlying storage file from old id to new id.
export type AttachmentRewritePlan = {
  oldAttachmentId: string;
  newAttachmentId: string;
};

// Rewritten (deep-cloned) PM JSON plus the list of copies the caller must
// perform to make that content valid.
export type RewriteResult = {
  content: unknown;
  copies: AttachmentRewritePlan[];
};
/**
* Walk a ProseMirror JSON tree, rewrite every attachment-like node so its
* `attachmentId` (and any `src` substring matching that id) point at a fresh
* id. Each unique old id maps to exactly one new id; the caller is responsible
* for actually copying the underlying storage file.
*
* Pure: does not mutate the input. Returns a deep clone.
*/
export function rewriteAttachmentsForUnsync(
content: unknown,
generateId: () => string,
): RewriteResult {
const cloned = content ? JSON.parse(JSON.stringify(content)) : content;
const idMap = new Map<string, string>();
const visit = (node: any): void => {
if (!node || typeof node !== 'object') return;
if (
typeof node.type === 'string' &&
isAttachmentNode(node.type) &&
node.attrs
) {
const oldId = node.attrs.attachmentId;
if (typeof oldId === 'string' && oldId.length > 0) {
let newId = idMap.get(oldId);
if (!newId) {
newId = generateId();
idMap.set(oldId, newId);
}
node.attrs.attachmentId = newId;
if (typeof node.attrs.src === 'string' && node.attrs.src.includes(oldId)) {
node.attrs.src = node.attrs.src.split(oldId).join(newId);
}
}
}
if (Array.isArray(node.content)) {
for (const child of node.content) visit(child);
}
};
visit(cloned);
const copies: AttachmentRewritePlan[] = Array.from(idMap.entries()).map(
([oldAttachmentId, newAttachmentId]) => ({
oldAttachmentId,
newAttachmentId,
}),
);
return { content: cloned, copies };
}
@@ -11,6 +11,8 @@ import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
import { PageRepo } from './repos/page/page.repo';
import { PagePermissionRepo } from './repos/page/page-permission.repo';
import { CommentRepo } from './repos/comment/comment.repo';
import { PageTransclusionsRepo } from './repos/page-transclusions/page-transclusions.repo';
import { PageTransclusionReferencesRepo } from './repos/page-transclusion-references/page-transclusion-references.repo';
import { PageHistoryRepo } from './repos/page/page-history.repo';
import { AttachmentRepo } from './repos/attachment/attachment.repo';
import { KyselyDB } from '@docmost/db/types/kysely.types';
@@ -75,6 +77,8 @@ import { normalizePostgresUrl } from '../common/helpers';
SpaceMemberRepo,
PageRepo,
PagePermissionRepo,
PageTransclusionsRepo,
PageTransclusionReferencesRepo,
PageHistoryRepo,
CommentRepo,
FavoriteRepo,
@@ -97,6 +101,8 @@ import { normalizePostgresUrl } from '../common/helpers';
SpaceMemberRepo,
PageRepo,
PagePermissionRepo,
PageTransclusionsRepo,
PageTransclusionReferencesRepo,
PageHistoryRepo,
CommentRepo,
FavoriteRepo,
@@ -0,0 +1,79 @@
import { type Kysely, sql } from 'kysely';
/**
 * Creates the sync-block tables:
 *  - `page_transclusions`: one row per `transclusion` node on a page, keyed
 *    by (page_id, transclusion_id), carrying the node's name and content.
 *  - `page_transclusion_references`: one row per `transclusionReference`
 *    node, i.e. an edge from a referencing page to a source block.
 * Both cascade-delete with their pages.
 */
export async function up(db: Kysely<any>): Promise<void> {
  await db.schema
    .createTable('page_transclusions')
    .addColumn('id', 'uuid', (col) =>
      // NOTE(review): assumes a gen_uuid_v7() SQL function already exists in
      // the database (presumably created by an earlier migration) — confirm.
      col.primaryKey().defaultTo(sql`gen_uuid_v7()`),
    )
    .addColumn('page_id', 'uuid', (col) =>
      col.notNull().references('pages.id').onDelete('cascade'),
    )
    .addColumn('transclusion_id', 'varchar', (col) => col.notNull())
    .addColumn('name', 'text')
    .addColumn('content', 'jsonb', (col) => col.notNull())
    .addColumn('created_at', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`now()`),
    )
    .addColumn('updated_at', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`now()`),
    )
    // One row per transclusion node per page.
    .addUniqueConstraint('page_transclusions_page_transclusion_unique', [
      'page_id',
      'transclusion_id',
    ])
    .execute();

  await db.schema
    .createIndex('idx_page_transclusions_page_id')
    .on('page_transclusions')
    .column('page_id')
    .execute();

  await db.schema
    .createTable('page_transclusion_references')
    .addColumn('id', 'uuid', (col) =>
      col.primaryKey().defaultTo(sql`gen_uuid_v7()`),
    )
    .addColumn('reference_page_id', 'uuid', (col) =>
      col.notNull().references('pages.id').onDelete('cascade'),
    )
    // Nullable: null means the reference sits loose on the page rather than
    // inside a `transclusion` (source) node.
    .addColumn('containing_transclusion_id', 'varchar')
    .addColumn('source_page_id', 'uuid', (col) =>
      col.notNull().references('pages.id').onDelete('cascade'),
    )
    .addColumn('transclusion_id', 'varchar', (col) => col.notNull())
    .addColumn('created_at', 'timestamptz', (col) =>
      col.notNull().defaultTo(sql`now()`),
    )
    // NOTE(review): Postgres unique constraints treat NULLs as distinct, so
    // rows with containing_transclusion_id = NULL are not deduplicated by
    // this constraint (nor by ON CONFLICT targeting it) — confirm acceptable.
    .addUniqueConstraint('page_transclusion_references_unique', [
      'reference_page_id',
      'containing_transclusion_id',
      'source_page_id',
      'transclusion_id',
    ])
    .execute();

  // Reverse-lookup: all references on a given page.
  await db.schema
    .createIndex('idx_page_transclusion_references_reference_page_id')
    .on('page_transclusion_references')
    .column('reference_page_id')
    .execute();

  // Forward-lookup: all pages referencing a given source block.
  await db.schema
    .createIndex('idx_page_transclusion_references_source')
    .on('page_transclusion_references')
    .columns(['source_page_id', 'transclusion_id'])
    .execute();

  // Used by the cycle-detection recursive CTE join.
  await db.schema
    .createIndex('idx_page_transclusion_references_container')
    .on('page_transclusion_references')
    .columns(['reference_page_id', 'containing_transclusion_id'])
    .execute();
}
/** Drops both sync-block tables (references first, then sources). */
export async function down(db: Kysely<any>): Promise<void> {
  for (const table of ['page_transclusion_references', 'page_transclusions']) {
    await db.schema.dropTable(table).execute();
  }
}
@@ -0,0 +1,181 @@
import { Injectable } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { sql } from 'kysely';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { dbOrTx } from '@docmost/db/utils';
import {
InsertablePageTransclusionReference,
PageTransclusionReference,
} from '@docmost/db/types/entity.types';
// Logical identity of a reference row within one referencing page:
// which source block it points at, and which source node (if any) contains it.
export type TransclusionReferenceKey = {
  containingTransclusionId: string | null;
  sourcePageId: string;
  transclusionId: string;
};
@Injectable()
export class PageTransclusionReferencesRepo {
  constructor(@InjectKysely() private readonly db: KyselyDB) {}

  /** All reference rows owned by a referencing page. */
  async findByReferencePageId(
    referencePageId: string,
    trx?: KyselyTransaction,
  ): Promise<PageTransclusionReference[]> {
    return dbOrTx(this.db, trx)
      .selectFrom('pageTransclusionReferences')
      .selectAll()
      .where('referencePageId', '=', referencePageId)
      .execute();
  }

  /** Distinct ids of pages that reference a given source block. */
  async findReferencePageIdsByTransclusion(
    sourcePageId: string,
    transclusionId: string,
    trx?: KyselyTransaction,
  ): Promise<string[]> {
    const rows = await dbOrTx(this.db, trx)
      .selectFrom('pageTransclusionReferences')
      .select('referencePageId')
      .distinct()
      .where('sourcePageId', '=', sourcePageId)
      .where('transclusionId', '=', transclusionId)
      .execute();
    return rows.map((r) => r.referencePageId);
  }

  /**
   * Bulk insert, ignoring rows that collide with the table's unique
   * constraint.
   * NOTE(review): Postgres treats NULLs as distinct in unique constraints,
   * so rows with containingTransclusionId = null never "conflict" and are
   * always inserted — confirm duplicates of loose references are acceptable.
   */
  async insertMany(
    rows: InsertablePageTransclusionReference[],
    trx?: KyselyTransaction,
  ): Promise<void> {
    if (rows.length === 0) return;
    await dbOrTx(this.db, trx)
      .insertInto('pageTransclusionReferences')
      .values(rows)
      .onConflict((oc) =>
        oc
          .columns([
            'referencePageId',
            'containingTransclusionId',
            'sourcePageId',
            'transclusionId',
          ])
          .doNothing(),
      )
      .execute();
  }

  /**
   * Deletes the given logical keys for one referencing page. Null container
   * ids need `IS NULL` (SQL `= NULL` never matches), hence the branch.
   */
  async deleteByReferenceAndKeys(
    referencePageId: string,
    keys: TransclusionReferenceKey[],
    trx?: KyselyTransaction,
  ): Promise<void> {
    if (keys.length === 0) return;
    await dbOrTx(this.db, trx)
      .deleteFrom('pageTransclusionReferences')
      .where('referencePageId', '=', referencePageId)
      .where((eb) =>
        eb.or(
          keys.map((k) =>
            eb.and([
              k.containingTransclusionId === null
                ? eb('containingTransclusionId', 'is', null)
                : eb(
                    'containingTransclusionId',
                    '=',
                    k.containingTransclusionId,
                  ),
              eb('sourcePageId', '=', k.sourcePageId),
              eb('transclusionId', '=', k.transclusionId),
            ]),
          ),
        ),
      )
      .execute();
  }

  /**
   * Deletes every row matching (referencePage, sourcePage, transclusion),
   * regardless of containing transclusion id. Used by the eager cleanup in
   * unsync.
   */
  async deleteOne(
    referencePageId: string,
    sourcePageId: string,
    transclusionId: string,
    trx?: KyselyTransaction,
  ): Promise<void> {
    await dbOrTx(this.db, trx)
      .deleteFrom('pageTransclusionReferences')
      .where('referencePageId', '=', referencePageId)
      .where('sourcePageId', '=', sourcePageId)
      .where('transclusionId', '=', transclusionId)
      .execute();
  }

  /** Deletes rows by primary key; no-op on an empty list. */
  async deleteByIds(ids: string[], trx?: KyselyTransaction): Promise<void> {
    if (ids.length === 0) return;
    await dbOrTx(this.db, trx)
      .deleteFrom('pageTransclusionReferences')
      .where('id', 'in', ids)
      .execute();
  }

  /**
   * Finds reference rows that participate in a cycle reachable from a given
   * source `(pageId, transclusionId)`. The walk follows source-to-source edges
   * (rows where `containing_transclusion_id IS NOT NULL`); loose page-level
   * references are not graph edges and are ignored.
   *
   * Returned rows are the *closing edges* — those whose insertion completed a
   * cycle. They are the safe set to remove to break the cycle while preserving
   * unrelated structure.
   */
  async findCyclicEdgesForSource(
    sourcePageId: string,
    transclusionId: string,
    trx?: KyselyTransaction,
  ): Promise<PageTransclusionReference[]> {
    // Recursive CTE: each step follows an edge from the node we are "inside"
    // (page_id, transclusion_id) to the source it references. `path` is an
    // array of composite (page, transclusion) records; revisiting a member of
    // the path marks the step as a cycle and stops further recursion on it.
    const rows = await sql<PageTransclusionReference>`
      WITH RECURSIVE walk(
        start_page,
        start_id,
        page_id,
        transclusion_id,
        edge_id,
        is_cycle,
        path
      ) AS (
        SELECT
          ${sourcePageId}::uuid,
          ${transclusionId}::varchar,
          ${sourcePageId}::uuid,
          ${transclusionId}::varchar,
          NULL::uuid,
          false,
          ARRAY[(${sourcePageId}::uuid, ${transclusionId}::varchar)]
        UNION ALL
        SELECT
          w.start_page,
          w.start_id,
          r.source_page_id,
          r.transclusion_id,
          r.id,
          (r.source_page_id, r.transclusion_id) = ANY(w.path),
          w.path || ARRAY[(r.source_page_id, r.transclusion_id)]
        FROM page_transclusion_references r
        JOIN walk w
          ON r.reference_page_id = w.page_id
          AND r.containing_transclusion_id = w.transclusion_id
        WHERE r.containing_transclusion_id IS NOT NULL
          AND NOT w.is_cycle
      )
      SELECT
        r.id,
        r.created_at AS "createdAt",
        r.reference_page_id AS "referencePageId",
        r.containing_transclusion_id AS "containingTransclusionId",
        r.source_page_id AS "sourcePageId",
        r.transclusion_id AS "transclusionId"
      FROM walk w
      JOIN page_transclusion_references r ON r.id = w.edge_id
      WHERE w.is_cycle
    `.execute(dbOrTx(this.db, trx));
    return rows.rows;
  }
}
@@ -0,0 +1,112 @@
import { Injectable } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { dbOrTx } from '@docmost/db/utils';
import {
InsertablePageTransclusion,
PageTransclusion,
UpdatablePageTransclusion,
} from '@docmost/db/types/entity.types';
import { sql } from 'kysely';
@Injectable()
export class PageTransclusionsRepo {
  constructor(@InjectKysely() private readonly db: KyselyDB) {}

  /**
   * All sync blocks on a page, named ones first (alphabetical, NULL names
   * last), ties broken by creation time.
   */
  async findByPageId(
    pageId: string,
    trx?: KyselyTransaction,
  ): Promise<PageTransclusion[]> {
    return dbOrTx(this.db, trx)
      .selectFrom('pageTransclusions')
      .selectAll()
      .where('pageId', '=', pageId)
      .orderBy(sql`name asc nulls last`)
      .orderBy('createdAt', 'asc')
      .execute();
  }

  /** Single row by its logical key; undefined when absent. */
  async findByPageAndTransclusion(
    pageId: string,
    transclusionId: string,
    trx?: KyselyTransaction,
  ): Promise<PageTransclusion | undefined> {
    return dbOrTx(this.db, trx)
      .selectFrom('pageTransclusions')
      .selectAll()
      .where('pageId', '=', pageId)
      .where('transclusionId', '=', transclusionId)
      .executeTakeFirst();
  }

  /**
   * Batch fetch by logical keys in one statement (OR of AND pairs).
   * Returns only the rows that exist; order is not guaranteed.
   */
  async findManyByPageAndTransclusion(
    keys: Array<{ pageId: string; transclusionId: string }>,
    trx?: KyselyTransaction,
  ): Promise<PageTransclusion[]> {
    if (keys.length === 0) return [];
    return dbOrTx(this.db, trx)
      .selectFrom('pageTransclusions')
      .selectAll()
      .where((eb) =>
        eb.or(
          keys.map((k) =>
            eb.and([
              eb('pageId', '=', k.pageId),
              eb('transclusionId', '=', k.transclusionId),
            ]),
          ),
        ),
      )
      .execute();
  }

  /** Inserts one row and returns it. */
  async insert(
    data: InsertablePageTransclusion,
    trx?: KyselyTransaction,
  ): Promise<PageTransclusion> {
    return dbOrTx(this.db, trx)
      .insertInto('pageTransclusions')
      .values(data)
      .returningAll()
      .executeTakeFirstOrThrow();
  }

  /** Bulk insert; no-op on an empty list. No conflict handling. */
  async insertMany(
    data: InsertablePageTransclusion[],
    trx?: KyselyTransaction,
  ): Promise<void> {
    if (data.length === 0) return;
    await dbOrTx(this.db, trx)
      .insertInto('pageTransclusions')
      .values(data)
      .execute();
  }

  /** Updates a row by logical key, bumping updatedAt. */
  async update(
    pageId: string,
    transclusionId: string,
    data: UpdatablePageTransclusion,
    trx?: KyselyTransaction,
  ): Promise<void> {
    await dbOrTx(this.db, trx)
      .updateTable('pageTransclusions')
      .set({ ...data, updatedAt: new Date() })
      .where('pageId', '=', pageId)
      .where('transclusionId', '=', transclusionId)
      .execute();
  }

  /** Deletes the given transclusion ids on one page; no-op on empty list. */
  async deleteByPageAndTransclusionIds(
    pageId: string,
    transclusionIds: string[],
    trx?: KyselyTransaction,
  ): Promise<void> {
    if (transclusionIds.length === 0) return;
    await dbOrTx(this.db, trx)
      .deleteFrom('pageTransclusions')
      .where('pageId', '=', pageId)
      .where('transclusionId', 'in', transclusionIds)
      .execute();
  }
}
+21
View File
@@ -228,6 +228,25 @@ export interface GroupUsers {
userId: string;
}
// Row shape for `page_transclusion_references`: one edge from a referencing
// page to a source sync block.
export interface PageTransclusionReferences {
  createdAt: Generated<Timestamp>;
  transclusionId: string;
  referencePageId: string;
  // null when the reference is loose on the page (not inside a source node).
  containingTransclusionId: string | null;
  id: Generated<string>;
  sourcePageId: string;
}
// Row shape for `page_transclusions`: the stored snapshot of one
// `transclusion` node on a page, keyed by (pageId, transclusionId).
export interface PageTransclusions {
  content: Json;
  createdAt: Generated<Timestamp>;
  transclusionId: string;
  id: Generated<string>;
  name: string | null;
  pageId: string;
  updatedAt: Generated<Timestamp>;
}
export interface PageHistory {
content: Json | null;
contributorIds: Generated<string[] | null>;
@@ -571,6 +590,8 @@ export interface DB {
groupUsers: GroupUsers;
notifications: Notifications;
pageAccess: PageAccess;
pageTransclusionReferences: PageTransclusionReferences;
pageTransclusions: PageTransclusions;
pagePermissions: PagePermissions;
pageHistory: PageHistory;
pageVerifications: PageVerifications;
@@ -7,6 +7,8 @@ import {
Groups,
Notifications,
PageAccess as _PageAccess,
PageTransclusions,
PageTransclusionReferences,
PagePermissions as _PagePermissions,
PageVerifications as _PageVerifications,
PageVerifiers as _PageVerifiers,
@@ -145,6 +147,18 @@ export type Favorite = Selectable<Favorites>;
export type InsertableFavorite = Insertable<Favorites>;
export type UpdatableFavorite = Updateable<Omit<Favorites, 'id'>>;
// Page Transclusion — select/insert/update projections of the
// page_transclusions table (id excluded from updates).
export type PageTransclusion = Selectable<PageTransclusions>;
export type InsertablePageTransclusion = Insertable<PageTransclusions>;
export type UpdatablePageTransclusion = Updateable<Omit<PageTransclusions, 'id'>>;

// Page Transclusion Reference — same projections for
// page_transclusion_references.
export type PageTransclusionReference = Selectable<PageTransclusionReferences>;
export type InsertablePageTransclusionReference = Insertable<PageTransclusionReferences>;
export type UpdatablePageTransclusionReference = Updateable<
  Omit<PageTransclusionReferences, 'id'>
>;
// File Task
export type FileTask = Selectable<FileTasks>;
export type InsertableFileTask = Insertable<FileTasks>;