feat: bug fixes (#2084)

* handle enter in inline code

* fix: duplicate comment cache

* track link nodes (backlinks)

* fix en-US translation

* fix internal a-links

* overrides

* 0.71.1
This commit is contained in:
Philip Okugbe
2026-04-05 13:45:36 +01:00
committed by GitHub
parent 642024ba9d
commit 895c1817ae
12 changed files with 593 additions and 488 deletions
+1 -1
View File
@@ -1,6 +1,6 @@
{
"name": "server",
"version": "0.71.0",
"version": "0.71.1",
"description": "",
"author": "",
"private": true,
@@ -11,6 +11,7 @@ import {
import {
extractMentions,
extractPageMentions,
extractInternalLinkSlugIds,
} from '../../common/helpers/prosemirror/utils';
import { PageHistoryRepo } from '@docmost/db/repos/page/page-history.repo';
import { PageRepo } from '@docmost/db/repos/page/page.repo';
@@ -77,12 +78,14 @@ export class HistoryProcessor extends WorkerHost implements OnModuleDestroy {
const mentions = extractMentions(page.content);
const pageMentions = extractPageMentions(mentions);
const internalLinkSlugIds = extractInternalLinkSlugIds(page.content);
await this.generalQueue
.add(QueueJob.PAGE_BACKLINKS, {
pageId,
workspaceId: page.workspaceId,
mentions: pageMentions,
internalLinkSlugIds,
} as IPageBacklinkJob)
.catch((err) => {
this.logger.error(
@@ -7,6 +7,10 @@ import { validate as isValidUUID } from 'uuid';
import { Transform } from '@tiptap/pm/transform';
import { TiptapTransformer } from '@hocuspocus/transformer';
import * as Y from 'yjs';
import {
INTERNAL_LINK_REGEX,
extractPageSlugId,
} from '../../../integrations/export/utils';
export interface MentionNode {
id: string;
@@ -64,6 +68,27 @@ export function extractPageMentions(mentionList: MentionNode[]): MentionNode[] {
return pageMentionList as MentionNode[];
}
/**
 * Walk a ProseMirror document and collect the unique page slugIds referenced
 * by internal link marks (links with `internal: true` and a matching href).
 *
 * @param prosemirrorJson - serialized ProseMirror document JSON
 * @returns de-duplicated slugIds in first-seen document order
 */
export function extractInternalLinkSlugIds(prosemirrorJson: any): string[] {
  // Set preserves insertion order, so the result matches document order
  // while de-duplicating repeated links to the same page.
  const seen = new Set<string>();
  const doc = jsonToNode(prosemirrorJson);

  doc.descendants((node: Node) => {
    node.marks.forEach((mark) => {
      // Only link marks flagged as internal with a non-empty href qualify.
      if (!(mark.type.name === 'link' && mark.attrs.internal && mark.attrs.href)) {
        return;
      }
      const match = mark.attrs.href.match(INTERNAL_LINK_REGEX);
      if (!match) {
        return;
      }
      // match[5] carries the page segment of the internal URL; the helper
      // extracts the slugId from it (may return a falsy value on no match).
      const slugId = extractPageSlugId(match[5]);
      if (slugId) {
        seen.add(slugId);
      }
    });
  });

  return Array.from(seen);
}
export function extractUserMentionIdsFromJson(json: any): string[] {
const userIds: string[] = [];
@@ -47,6 +47,10 @@ import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import { EventName } from '../../../common/events/event.contants';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { CollaborationGateway } from '../../../collaboration/collaboration.gateway';
import {
INTERNAL_LINK_REGEX,
extractPageSlugId,
} from '../../../integrations/export/utils';
import { markdownToHtml } from '@docmost/editor-ext';
import { WatcherService } from '../../watcher/watcher.service';
import { sql } from 'kysely';
@@ -510,6 +514,11 @@ export class PageService {
});
});
const slugIdMap = new Map<string, CopyPageMapEntry>();
for (const [, entry] of pageMap) {
slugIdMap.set(entry.oldSlugId, entry);
}
const attachmentMap = new Map<string, ICopyPageAttachment>();
const insertablePages: InsertablePage[] = await Promise.all(
@@ -576,6 +585,28 @@ export class PageService {
node.attrs.slugId = mappedPage.newSlugId;
}
}
// Update internal page links in link marks
for (const mark of node.marks) {
if (
mark.type.name === 'link' &&
mark.attrs.internal &&
mark.attrs.href
) {
const match = mark.attrs.href.match(INTERNAL_LINK_REGEX);
if (match) {
const slugId = extractPageSlugId(match[5]);
if (slugId && slugIdMap.has(slugId)) {
const mappedPage = slugIdMap.get(slugId);
//@ts-ignore
mark.attrs.href = mark.attrs.href.replace(
slugId,
mappedPage.newSlugId,
);
}
}
}
}
});
const prosemirrorJson = prosemirrorDoc.toJSON();
@@ -4,6 +4,7 @@ export interface IPageBacklinkJob {
pageId: string;
workspaceId: string;
mentions: MentionNode[];
internalLinkSlugIds?: string[];
}
export interface IAddPageWatchersJob {
@@ -11,7 +11,7 @@ export async function processBacklinks(
backlinkRepo: BacklinkRepo,
data: IPageBacklinkJob,
): Promise<void> {
const { pageId, mentions, workspaceId } = data;
const { pageId, mentions, workspaceId, internalLinkSlugIds = [] } = data;
await executeTx(db, async (trx) => {
const existingBacklinks = await trx
@@ -20,7 +20,28 @@ export async function processBacklinks(
.where('sourcePageId', '=', pageId)
.execute();
if (existingBacklinks.length === 0 && mentions.length === 0) {
const mentionTargetPageIds = mentions
.filter((mention) => mention.entityId !== pageId)
.map((mention) => mention.entityId);
let resolvedLinkPageIds: string[] = [];
if (internalLinkSlugIds.length > 0) {
const resolvedPages = await trx
.selectFrom('pages')
.select('id')
.where('slugId', 'in', internalLinkSlugIds)
.where('workspaceId', '=', workspaceId)
.execute();
resolvedLinkPageIds = resolvedPages
.map((p) => p.id)
.filter((id) => id !== pageId);
}
const allTargetPageIds = [
...new Set([...mentionTargetPageIds, ...resolvedLinkPageIds]),
];
if (existingBacklinks.length === 0 && allTargetPageIds.length === 0) {
return;
}
@@ -28,16 +49,12 @@ export async function processBacklinks(
(backlink) => backlink.targetPageId,
);
const targetPageIds = mentions
.filter((mention) => mention.entityId !== pageId)
.map((mention) => mention.entityId);
let validTargetPages = [];
if (targetPageIds.length > 0) {
if (allTargetPageIds.length > 0) {
validTargetPages = await trx
.selectFrom('pages')
.select('id')
.where('id', 'in', targetPageIds)
.where('id', 'in', allTargetPageIds)
.where('workspaceId', '=', workspaceId)
.execute();
}