Merge branch 'main' into tiptap3-migration

This commit is contained in:
Philipinho
2025-09-10 04:00:01 +01:00
158 changed files with 5510 additions and 1107 deletions
+4 -1
View File
@@ -1,6 +1,6 @@
{
"name": "server",
"version": "0.22.2",
"version": "0.23.0",
"description": "",
"author": "",
"private": true,
@@ -66,6 +66,8 @@
"jsonwebtoken": "^9.0.2",
"kysely": "^0.28.2",
"kysely-migration-cli": "^0.4.2",
"ldapts": "^7.4.0",
"mammoth": "^1.10.0",
"mime-types": "^2.1.35",
"nanoid": "3.3.11",
"nestjs-kysely": "^1.2.0",
@@ -75,6 +77,7 @@
"p-limit": "^6.2.0",
"passport-google-oauth20": "^2.0.0",
"passport-jwt": "^4.0.1",
"pdfjs-dist": "^5.4.54",
"pg": "^8.16.0",
"pg-tsquery": "^8.4.2",
"postmark": "^4.0.5",
@@ -30,6 +30,7 @@ import {
Excalidraw,
Embed,
Mention,
Subpages,
} from '@docmost/editor-ext';
import { generateText, getSchema, JSONContent } from '@tiptap/core';
import { generateHTML } from '../common/helpers/prosemirror/html';
@@ -78,6 +79,7 @@ export const tiptapExtensions = [
Excalidraw,
Embed,
Mention,
Subpages,
] as any;
export function jsonToHtml(tiptapJson: any) {
+9
View File
@@ -87,3 +87,12 @@ export function extractBearerTokenFromHeader(
const [type, token] = request.headers.authorization?.split(' ') ?? [];
return type === 'Bearer' ? token : undefined;
}
/**
 * Decide whether a workspace has access to EE features.
 *
 * Access is granted when the workspace holds an explicit license key,
 * or when it runs in the cloud on the "business" plan.
 */
export function hasLicenseOrEE(opts: {
  licenseKey: string;
  plan: string;
  isCloud: boolean;
}): boolean {
  // An explicit license key always wins.
  if (opts.licenseKey) {
    return true;
  }
  // Cloud workspaces qualify only on the "business" plan.
  return opts.isCloud && opts.plan === 'business';
}
@@ -12,10 +12,14 @@ export class InternalLogFilter extends ConsoleLogger {
constructor() {
super();
this.allowedLogLevels =
process.env.NODE_ENV === 'production'
? ['log', 'error', 'fatal']
: ['log', 'debug', 'verbose', 'warn', 'error', 'fatal'];
const isProduction = process.env.NODE_ENV === 'production';
const isDebugMode = process.env.DEBUG_MODE === 'true';
if (isProduction && !isDebugMode) {
this.allowedLogLevels = ['log', 'error', 'fatal'];
} else {
this.allowedLogLevels = ['log', 'debug', 'verbose', 'warn', 'error', 'fatal'];
}
}
private isLogLevelAllowed(level: string): boolean {
@@ -3,12 +3,15 @@ import { OnWorkerEvent, Processor, WorkerHost } from '@nestjs/bullmq';
import { Job } from 'bullmq';
import { AttachmentService } from '../services/attachment.service';
import { QueueJob, QueueName } from 'src/integrations/queue/constants';
import { Space } from '@docmost/db/types/entity.types';
import { ModuleRef } from '@nestjs/core';
@Processor(QueueName.ATTACHMENT_QUEUE)
export class AttachmentProcessor extends WorkerHost implements OnModuleDestroy {
private readonly logger = new Logger(AttachmentProcessor.name);
constructor(private readonly attachmentService: AttachmentService) {
constructor(
private readonly attachmentService: AttachmentService,
private moduleRef: ModuleRef,
) {
super();
}
@@ -25,6 +28,33 @@ export class AttachmentProcessor extends WorkerHost implements OnModuleDestroy {
job.data.pageId,
);
}
if (
job.name === QueueJob.ATTACHMENT_INDEX_CONTENT ||
job.name === QueueJob.ATTACHMENT_INDEXING
) {
let AttachmentEeModule: any;
try {
// eslint-disable-next-line @typescript-eslint/no-require-imports
AttachmentEeModule = require('./../../../ee/attachments-ee/attachment-ee.service');
} catch (err) {
this.logger.debug(
'Attachment enterprise module requested but EE module not bundled in this build',
);
return;
}
const attachmentEeService = this.moduleRef.get(
AttachmentEeModule.AttachmentEeService,
{ strict: false },
);
if (job.name === QueueJob.ATTACHMENT_INDEX_CONTENT) {
await attachmentEeService.indexAttachment(job.data.attachmentId);
} else if (job.name === QueueJob.ATTACHMENT_INDEXING) {
await attachmentEeService.indexAttachments(
job.data.workspaceId,
);
}
}
} catch (err) {
throw err;
}
@@ -37,9 +67,15 @@ export class AttachmentProcessor extends WorkerHost implements OnModuleDestroy {
@OnWorkerEvent('failed')
onError(job: Job) {
this.logger.error(
`Error processing ${job.name} job. Reason: ${job.failedReason}`,
);
if (job.name === QueueJob.ATTACHMENT_INDEX_CONTENT) {
this.logger.debug(
`Error processing ${job.name} job for attachment ${job.data?.attachmentId}. Reason: ${job.failedReason}`,
);
} else {
this.logger.error(
`Error processing ${job.name} job. Reason: ${job.failedReason}`,
);
}
}
@OnWorkerEvent('completed')
@@ -22,6 +22,9 @@ import { executeTx } from '@docmost/db/utils';
import { UserRepo } from '@docmost/db/repos/user/user.repo';
import { WorkspaceRepo } from '@docmost/db/repos/workspace/workspace.repo';
import { SpaceRepo } from '@docmost/db/repos/space/space.repo';
import { InjectQueue } from '@nestjs/bullmq';
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import { Queue } from 'bullmq';
@Injectable()
export class AttachmentService {
@@ -33,6 +36,7 @@ export class AttachmentService {
private readonly workspaceRepo: WorkspaceRepo,
private readonly spaceRepo: SpaceRepo,
@InjectKysely() private readonly db: KyselyDB,
@InjectQueue(QueueName.ATTACHMENT_QUEUE) private attachmentQueue: Queue,
) {}
async uploadFile(opts: {
@@ -99,6 +103,23 @@ export class AttachmentService {
pageId,
});
}
// Only index PDFs and DOCX files
if (['.pdf', '.docx'].includes(attachment.fileExt.toLowerCase())) {
await this.attachmentQueue.add(
QueueJob.ATTACHMENT_INDEX_CONTENT,
{
attachmentId: attachmentId,
},
{
attempts: 2,
backoff: {
type: 'exponential',
delay: 10000,
},
},
);
}
} catch (err) {
// delete uploaded file on error
this.logger.error(err);
@@ -367,4 +388,5 @@ export class AttachmentService {
throw err;
}
}
}
@@ -106,6 +106,7 @@ export class AuthService {
await this.userRepo.updateUser(
{
password: newPasswordHash,
hasGeneratedPassword: false,
},
userId,
workspaceId,
@@ -186,6 +187,7 @@ export class AuthService {
await this.userRepo.updateUser(
{
password: newPasswordHash,
hasGeneratedPassword: false,
},
user.id,
workspace.id,
@@ -1,7 +1,7 @@
import { IsOptional, IsString } from 'class-validator';
import { IsNotEmpty, IsString } from 'class-validator';
export class DeletedPageDto {
@IsOptional()
@IsNotEmpty()
@IsString()
spaceId: string;
}
@@ -1,7 +1,11 @@
import { IsOptional, IsString } from 'class-validator';
import { IsOptional, IsString, IsUUID } from 'class-validator';
import { SpaceIdDto } from './page.dto';
export class SidebarPageDto extends SpaceIdDto {
export class SidebarPageDto {
@IsOptional()
@IsUUID()
spaceId: string;
@IsOptional()
@IsString()
pageId: string;
+19 -12
View File
@@ -194,7 +194,7 @@ export class PageController {
deletedPageDto.spaceId,
);
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Page)) {
if (ability.cannot(SpaceCaslAction.Manage, SpaceCaslSubject.Page)) {
throw new ForbiddenException();
}
@@ -254,21 +254,28 @@ export class PageController {
@Body() pagination: PaginationOptions,
@AuthUser() user: User,
) {
const ability = await this.spaceAbility.createForUser(user, dto.spaceId);
if (!dto.spaceId && !dto.pageId) {
throw new BadRequestException(
'Either spaceId or pageId must be provided',
);
}
let spaceId = dto.spaceId;
if (dto.pageId) {
const page = await this.pageRepo.findById(dto.pageId);
if (!page) {
throw new ForbiddenException();
}
spaceId = page.spaceId;
}
const ability = await this.spaceAbility.createForUser(user, spaceId);
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Page)) {
throw new ForbiddenException();
}
let pageId = null;
if (dto.pageId) {
const page = await this.pageRepo.findById(dto.pageId);
if (page.spaceId !== dto.spaceId) {
throw new ForbiddenException();
}
pageId = page.id;
}
return this.pageService.getSidebarPages(dto.spaceId, pagination, pageId);
return this.pageService.getSidebarPages(spaceId, pagination, dto.pageId);
}
@HttpCode(HttpStatus.OK)
@@ -109,7 +109,8 @@ export class PageService {
.selectFrom('pages')
.select(['position'])
.where('spaceId', '=', spaceId)
.orderBy('position', 'desc')
.where('deletedAt', 'is', null)
.orderBy('position', (ob) => ob.collate('C').desc())
.limit(1);
if (parentPageId) {
@@ -190,7 +191,7 @@ export class PageService {
'deletedAt',
])
.select((eb) => this.pageRepo.withHasChildren(eb))
.orderBy('position', 'asc')
.orderBy('position', (ob) => ob.collate('C').asc())
.where('deletedAt', 'is', null)
.where('spaceId', '=', spaceId);
@@ -5,15 +5,13 @@ import {
IsOptional,
IsString,
} from 'class-validator';
import { PartialType } from '@nestjs/mapped-types';
import { CreateWorkspaceDto } from '../../workspace/dto/create-workspace.dto';
export class SearchDTO {
@IsNotEmpty()
@IsString()
query: string;
@IsNotEmpty()
@IsOptional()
@IsString()
spaceId: string;
+11 -6
View File
@@ -31,6 +31,7 @@ import { Public } from '../../common/decorators/public.decorator';
import { ShareRepo } from '@docmost/db/repos/share/share.repo';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import { EnvironmentService } from '../../integrations/environment/environment.service';
import { hasLicenseOrEE } from '../../common/helpers';
@UseGuards(JwtAuthGuard)
@Controller('shares')
@@ -65,9 +66,11 @@ export class ShareController {
return {
...(await this.shareService.getSharedPage(dto, workspace.id)),
hasLicenseKey:
Boolean(workspace.licenseKey) ||
(this.environmentService.isCloud() && workspace.plan === 'business'),
hasLicenseKey: hasLicenseOrEE({
licenseKey: workspace.licenseKey,
isCloud: this.environmentService.isCloud(),
plan: workspace.plan,
}),
};
}
@@ -175,9 +178,11 @@ export class ShareController {
) {
return {
...(await this.shareService.getShareTree(dto.shareId, workspace.id)),
hasLicenseKey:
Boolean(workspace.licenseKey) ||
(this.environmentService.isCloud() && workspace.plan === 'business'),
hasLicenseKey: hasLicenseOrEE({
licenseKey: workspace.licenseKey,
isCloud: this.environmentService.isCloud(),
plan: workspace.plan,
}),
};
}
}
@@ -0,0 +1,15 @@
import { type Kysely } from 'kysely';
/**
 * Add the "group_sync" toggle to auth providers.
 * Existing rows default to `false`, so current behavior is unchanged.
 */
export async function up(db: Kysely<any>): Promise<void> {
  const alterAuthProviders = db.schema.alterTable('auth_providers');
  await alterAuthProviders
    .addColumn('group_sync', 'boolean', (column) =>
      column.defaultTo(false).notNull(),
    )
    .execute();
}
/**
 * Roll back the migration: remove "group_sync" from auth providers.
 */
export async function down(db: Kysely<any>): Promise<void> {
  const alterAuthProviders = db.schema.alterTable('auth_providers');
  await alterAuthProviders.dropColumn('group_sync').execute();
}
@@ -0,0 +1,68 @@
import { type Kysely, sql } from 'kysely';
/**
 * LDAP support migration.
 *
 * 1. Widens `auth_providers.type` from the `auth_provider_type` enum to plain
 *    text (new 'ldap' value cannot be added to a PG enum inside a transaction),
 *    then drops the now-unused enum type.
 * 2. Adds `users.has_generated_password` (non-null, defaults to false).
 * 3. Adds the LDAP configuration columns plus generic `ldap_config`/`settings`
 *    JSONB columns (defaulting to empty objects) on `auth_providers`.
 *
 * NOTE(review): the statement order matters — the column must be converted to
 * text *before* the enum type is dropped; do not reorder.
 */
export async function up(db: Kysely<any>): Promise<void> {
  // switch type to text column since you can't add value to PG types in a transaction
  await db.schema
    .alterTable('auth_providers')
    .alterColumn('type', (col) => col.setDataType('text'))
    .execute();
  // Enum is no longer referenced; safe to drop (ifExists guards reruns).
  await db.schema.dropType('auth_provider_type').ifExists().execute();
  // Track whether a user's password was auto-generated (e.g. LDAP-provisioned).
  await db.schema
    .alterTable('users')
    .addColumn('has_generated_password', 'boolean', (col) =>
      col.notNull().defaultTo(false).ifNotExists(),
    )
    .execute();
  // LDAP connection/search settings; nullable varchars, JSONB defaults to '{}'.
  await db.schema
    .alterTable('auth_providers')
    .addColumn('ldap_url', 'varchar', (col) => col)
    .addColumn('ldap_bind_dn', 'varchar', (col) => col)
    .addColumn('ldap_bind_password', 'varchar', (col) => col)
    .addColumn('ldap_base_dn', 'varchar', (col) => col)
    .addColumn('ldap_user_search_filter', 'varchar', (col) => col)
    .addColumn('ldap_user_attributes', 'jsonb', (col) =>
      col.defaultTo(sql`'{}'::jsonb`),
    )
    .addColumn('ldap_tls_enabled', 'boolean', (col) => col.defaultTo(false))
    .addColumn('ldap_tls_ca_cert', 'text', (col) => col)
    .addColumn('ldap_config', 'jsonb', (col) => col.defaultTo(sql`'{}'::jsonb`))
    .addColumn('settings', 'jsonb', (col) => col.defaultTo(sql`'{}'::jsonb`))
    .execute();
}
/**
 * Reverts the LDAP support migration.
 *
 * Order matters: LDAP columns are dropped, the `auth_provider_type` enum is
 * recreated, rows with type 'ldap' are purged (the enum has no 'ldap' member,
 * so the cast below would otherwise fail), and finally the `type` column is
 * converted back to the enum via raw SQL.
 */
export async function down(db: Kysely<any>): Promise<void> {
  // Remove the has_generated_password flag added by up().
  await db.schema
    .alterTable('users')
    .dropColumn('has_generated_password')
    .execute();
  // Drop all LDAP-related and generic settings columns.
  await db.schema
    .alterTable('auth_providers')
    .dropColumn('ldap_url')
    .dropColumn('ldap_bind_dn')
    .dropColumn('ldap_bind_password')
    .dropColumn('ldap_base_dn')
    .dropColumn('ldap_user_search_filter')
    .dropColumn('ldap_user_attributes')
    .dropColumn('ldap_tls_enabled')
    .dropColumn('ldap_tls_ca_cert')
    .dropColumn('ldap_config')
    .dropColumn('settings')
    .execute();
  // Recreate the original enum type.
  await db.schema
    .createType('auth_provider_type')
    .asEnum(['saml', 'oidc', 'google'])
    .execute();
  // Purge 'ldap' rows before casting — that value is not in the enum.
  await db.deleteFrom('auth_providers').where('type', '=', 'ldap').execute();
  // Raw SQL: Kysely's alterColumn cannot express USING-cast conversions.
  await sql`
    ALTER TABLE auth_providers
    ALTER COLUMN type TYPE auth_provider_type
    USING type::auth_provider_type
  `.execute(db);
}
@@ -0,0 +1,29 @@
import { type Kysely, sql } from 'kysely';
/**
 * Attachment full-text-search migration: stores extracted text plus a
 * `tsvector` column on attachments, with a GIN index for fast matching.
 */
export async function up(db: Kysely<any>): Promise<void> {
  // Nullable columns: text_content holds extracted text, tsv its tsvector.
  // tsvector is not a built-in Kysely data type, hence the raw sql fragment.
  await db.schema
    .alterTable('attachments')
    .addColumn('text_content', 'text')
    .addColumn('tsv', sql`tsvector`)
    .execute();

  // GIN index accelerates tsquery (@@) lookups against the tsv column.
  await db.schema
    .createIndex('attachments_tsv_idx')
    .on('attachments')
    .using('GIN')
    .column('tsv')
    .execute();
}
/**
 * Reverts the attachment full-text-search migration: drops the GIN index,
 * then the text_content and tsv columns.
 *
 * Fix: the index must be dropped with the standalone `db.schema.dropIndex`
 * builder. Going through `alterTable('attachments').dropIndex(...)` emits
 * `ALTER TABLE ... DROP INDEX`, which PostgreSQL does not support (that form
 * is MySQL-only); Postgres requires a standalone `DROP INDEX` statement.
 */
export async function down(db: Kysely<any>): Promise<void> {
  // Drop the index first, then the columns it covered.
  await db.schema.dropIndex('attachments_tsv_idx').execute();
  await db.schema
    .alterTable('attachments')
    .dropColumn('text_content')
    .dropColumn('tsv')
    .execute();
}
@@ -12,6 +12,23 @@ import {
export class AttachmentRepo {
constructor(@InjectKysely() private readonly db: KyselyDB) {}
private baseFields: Array<keyof Attachment> = [
'id',
'fileName',
'filePath',
'fileSize',
'fileExt',
'mimeType',
'type',
'creatorId',
'pageId',
'spaceId',
'workspaceId',
'createdAt',
'updatedAt',
'deletedAt',
];
async findById(
attachmentId: string,
opts?: {
@@ -22,7 +39,7 @@ export class AttachmentRepo {
return db
.selectFrom('attachments')
.selectAll()
.select(this.baseFields)
.where('id', '=', attachmentId)
.executeTakeFirst();
}
@@ -36,7 +53,7 @@ export class AttachmentRepo {
return db
.insertInto('attachments')
.values(insertableAttachment)
.returningAll()
.returning(this.baseFields)
.executeTakeFirst();
}
@@ -50,7 +67,7 @@ export class AttachmentRepo {
return db
.selectFrom('attachments')
.selectAll()
.select(this.baseFields)
.where('spaceId', '=', spaceId)
.execute();
}
@@ -64,6 +81,7 @@ export class AttachmentRepo {
.updateTable('attachments')
.set(updatableAttachment)
.where('pageId', 'in', pageIds)
.returning(this.baseFields)
.executeTakeFirst();
}
@@ -75,7 +93,7 @@ export class AttachmentRepo {
.updateTable('attachments')
.set(updatableAttachment)
.where('id', '=', attachmentId)
.returningAll()
.returning(this.baseFields)
.executeTakeFirst();
}
@@ -399,6 +399,7 @@ export class PageRepo {
])
.$if(opts?.includeContent, (qb) => qb.select('content'))
.where('id', '=', parentPageId)
.where('deletedAt', 'is', null)
.unionAll((exp) =>
exp
.selectFrom('pages as p')
@@ -413,7 +414,8 @@ export class PageRepo {
'p.workspaceId',
])
.$if(opts?.includeContent, (qb) => qb.select('p.content'))
.innerJoin('page_hierarchy as ph', 'p.parentPageId', 'ph.id'),
.innerJoin('page_hierarchy as ph', 'p.parentPageId', 'ph.id')
.where('p.deletedAt', 'is', null),
),
)
.selectFrom('page_hierarchy')
@@ -34,6 +34,7 @@ export class UserRepo {
'createdAt',
'updatedAt',
'deletedAt',
'hasGeneratedPassword',
];
async findById(
+15 -3
View File
@@ -5,8 +5,6 @@
import type { ColumnType } from "kysely";
export type AuthProviderType = "google" | "oidc" | "saml";
export type Generated<T> = T extends ColumnType<infer S, infer I, infer U>
? ColumnType<S, I | undefined, U>
: ColumnType<T, T | undefined, T>;
@@ -39,6 +37,8 @@ export interface Attachments {
mimeType: string | null;
pageId: string | null;
spaceId: string | null;
textContent: string | null;
tsv: string | null;
type: string | null;
updatedAt: Generated<Timestamp>;
workspaceId: string;
@@ -62,13 +62,24 @@ export interface AuthProviders {
deletedAt: Timestamp | null;
id: Generated<string>;
isEnabled: Generated<boolean>;
groupSync: Generated<boolean>;
ldapBaseDn: string | null;
ldapBindDn: string | null;
ldapBindPassword: string | null;
ldapTlsCaCert: string | null;
ldapTlsEnabled: Generated<boolean | null>;
ldapUrl: string | null;
ldapUserAttributes: Json | null;
ldapUserSearchFilter: string | null;
ldapConfig: Json | null;
settings: Json | null;
name: string;
oidcClientId: string | null;
oidcClientSecret: string | null;
oidcIssuer: string | null;
samlCertificate: string | null;
samlUrl: string | null;
type: AuthProviderType;
type: string;
updatedAt: Generated<Timestamp>;
workspaceId: string;
}
@@ -275,6 +286,7 @@ export interface Users {
lastActiveAt: Timestamp | null;
lastLoginAt: Timestamp | null;
locale: string | null;
hasGeneratedPassword: Generated<boolean | null>;
name: string | null;
password: string | null;
role: string | null;
@@ -23,6 +23,10 @@ export class ExportPageDto {
@IsOptional()
@IsBoolean()
includeChildren?: boolean;
@IsOptional()
@IsBoolean()
includeAttachments?: boolean;
}
export class ExportSpaceDto {
@@ -46,7 +46,7 @@ export class ExportController {
includeContent: true,
});
if (!page) {
if (!page || page.deletedAt) {
throw new NotFoundException('Page not found');
}
@@ -55,40 +55,22 @@ export class ExportController {
throw new ForbiddenException();
}
const fileExt = getExportExtension(dto.format);
const fileName = sanitize(page.title || 'untitled') + fileExt;
if (dto.includeChildren) {
const zipFileBuffer = await this.exportService.exportPageWithChildren(
dto.pageId,
dto.format,
);
const newName = path.parse(fileName).name + '.zip';
res.headers({
'Content-Type': 'application/zip',
'Content-Disposition':
'attachment; filename="' + encodeURIComponent(newName) + '"',
});
res.send(zipFileBuffer);
return;
}
const rawContent = await this.exportService.exportPage(
const zipFileBuffer = await this.exportService.exportPages(
dto.pageId,
dto.format,
page,
true,
dto.includeAttachments,
dto.includeChildren,
);
const fileName = sanitize(page.title || 'untitled') + '.zip';
res.headers({
'Content-Type': getMimeType(fileExt),
'Content-Type': 'application/zip',
'Content-Disposition':
'attachment; filename="' + encodeURIComponent(fileName) + '"',
});
res.send(rawContent);
res.send(zipFileBuffer);
}
@UseGuards(JwtAuthGuard)
@@ -89,10 +89,28 @@ export class ExportService {
return;
}
async exportPageWithChildren(pageId: string, format: string) {
const pages = await this.pageRepo.getPageAndDescendants(pageId, {
includeContent: true,
});
async exportPages(
pageId: string,
format: string,
includeAttachments: boolean,
includeChildren: boolean,
) {
let pages: Page[];
if (includeChildren) {
//@ts-ignore
pages = await this.pageRepo.getPageAndDescendants(pageId, {
includeContent: true,
});
} else {
// Only fetch the single page when includeChildren is false
const page = await this.pageRepo.findById(pageId, {
includeContent: true,
});
if (page){
pages = [page];
}
}
if (!pages || pages.length === 0) {
throw new BadRequestException('No pages to export');
@@ -105,7 +123,7 @@ export class ExportService {
const tree = buildTree(pages as Page[]);
const zip = new JSZip();
await this.zipPages(tree, format, zip);
await this.zipPages(tree, format, zip, includeAttachments);
const zipFile = zip.generateNodeStream({
type: 'nodebuffer',
@@ -168,7 +186,7 @@ export class ExportService {
tree: PageExportTree,
format: string,
zip: JSZip,
includeAttachments = true,
includeAttachments: boolean,
): Promise<void> {
const slugIdToPath: Record<string, string> = {};
@@ -200,7 +218,8 @@ export class ExportService {
if (includeAttachments) {
await this.zipAttachments(updatedJsonContent, page.spaceId, folder);
updatedJsonContent = updateAttachmentUrlsToLocalPaths(updatedJsonContent);
updatedJsonContent =
updateAttachmentUrlsToLocalPaths(updatedJsonContent);
}
const pageTitle = getPageTitle(page.title);
@@ -69,8 +69,17 @@ function taskList(turndownService: TurndownService) {
'input[type="checkbox"]',
) as HTMLInputElement;
const isChecked = checkbox.checked;
return `- ${isChecked ? '[x]' : '[ ]'} ${content.trim()} \n`;
// Process content like regular list items
content = content
.replace(/^\n+/, '') // remove leading newlines
.replace(/\n+$/, '\n') // replace trailing newlines with just a single one
.replace(/\n/gm, '\n '); // indent nested content with 2 spaces
// Create the checkbox prefix
const prefix = `- ${isChecked ? '[x]' : '[ ]'} `;
return prefix + content + (node.nextSibling && !/\n$/.test(content) ? '\n' : '');
},
});
}
@@ -6,6 +6,7 @@ import { cleanUrlString } from '../utils/file.utils';
import { StorageService } from '../../storage/storage.service';
import { createReadStream } from 'node:fs';
import { promises as fs } from 'fs';
import { Readable } from 'stream';
import { getMimeType, sanitizeFileName } from '../../../common/helpers';
import { v7 } from 'uuid';
import { FileTask } from '@docmost/db/types/entity.types';
@@ -15,6 +16,21 @@ import { unwrapFromParagraph } from '../utils/import-formatter';
import { resolveRelativeAttachmentPath } from '../utils/import.utils';
import { load } from 'cheerio';
import pLimit from 'p-limit';
import { InjectQueue } from '@nestjs/bullmq';
import { Queue } from 'bullmq';
import { QueueJob, QueueName } from '../../queue/constants';
interface AttachmentInfo {
href: string;
fileName: string;
mimeType: string;
}
interface DrawioPair {
drawioFile?: AttachmentInfo;
pngFile?: AttachmentInfo;
baseName: string;
}
@Injectable()
export class ImportAttachmentService {
@@ -26,6 +42,7 @@ export class ImportAttachmentService {
constructor(
private readonly storageService: StorageService,
@InjectKysely() private readonly db: KyselyDB,
@InjectQueue(QueueName.ATTACHMENT_QUEUE) private attachmentQueue: Queue,
) {}
async processAttachments(opts: {
@@ -35,6 +52,7 @@ export class ImportAttachmentService {
pageId: string;
fileTask: FileTask;
attachmentCandidates: Map<string, string>;
pageAttachments?: AttachmentInfo[];
}): Promise<string> {
const {
html,
@@ -43,6 +61,7 @@ export class ImportAttachmentService {
pageId,
fileTask,
attachmentCandidates,
pageAttachments = [],
} = opts;
const attachmentTasks: (() => Promise<void>)[] = [];
@@ -57,7 +76,7 @@ export class ImportAttachmentService {
/**
* Cache keyed by the *relative* path that appears in the HTML.
* Ensures we upload (and DB-insert) each attachment at most once,
* even if its referenced multiple times on the page.
* even if it's referenced multiple times on the page.
*/
const processed = new Map<
string,
@@ -70,6 +89,99 @@ export class ImportAttachmentService {
}
>();
// Analyze attachments to identify Draw.io pairs
const { drawioPairs, skipFiles } = this.analyzeAttachments(pageAttachments);
// Map to store processed Draw.io SVGs
const drawioSvgMap = new Map<
string,
{
attachmentId: string;
apiFilePath: string;
fileName: string;
}
>();
//this.logger.debug(`Found ${drawioPairs.size} Draw.io pairs to process`);
// Process Draw.io pairs and create combined SVG files
for (const [drawioHref, pair] of drawioPairs) {
if (!pair.drawioFile) continue;
const drawioAbsPath = attachmentCandidates.get(drawioHref);
if (!drawioAbsPath) continue;
const pngAbsPath = pair.pngFile
? attachmentCandidates.get(pair.pngFile.href)
: undefined;
try {
// Create combined SVG with Draw.io data and PNG image
const svgBuffer = await this.createDrawioSvg(drawioAbsPath, pngAbsPath);
// Generate file details - always use "diagram.drawio.svg" as filename
const attachmentId = v7();
const fileName = 'diagram.drawio.svg';
const storageFilePath = `${getAttachmentFolderPath(
AttachmentType.File,
fileTask.workspaceId,
)}/${attachmentId}/${fileName}`;
const apiFilePath = `/api/files/${attachmentId}/${fileName}`;
// Upload the SVG file
attachmentTasks.push(async () => {
try {
const stream = Readable.from(svgBuffer);
// Upload to storage
await this.storageService.uploadStream(storageFilePath, stream);
// Insert into database
await this.db
.insertInto('attachments')
.values({
id: attachmentId,
filePath: storageFilePath,
fileName: fileName,
fileSize: svgBuffer.length,
mimeType: 'image/svg+xml',
type: 'file',
fileExt: '.svg',
creatorId: fileTask.creatorId,
workspaceId: fileTask.workspaceId,
pageId,
spaceId: fileTask.spaceId,
})
.execute();
uploadStats.completed++;
} catch (error) {
uploadStats.failed++;
uploadStats.failedFiles.push(fileName);
this.logger.error(
`Failed to upload Draw.io SVG ${fileName}:`,
error,
);
}
});
// Store the mapping for both Draw.io and PNG references
drawioSvgMap.set(drawioHref, { attachmentId, apiFilePath, fileName });
if (pair.pngFile) {
drawioSvgMap.set(pair.pngFile.href, {
attachmentId,
apiFilePath,
fileName,
});
}
} catch (error) {
this.logger.error(
`Failed to process Draw.io pair ${pair.baseName}:`,
error,
);
}
}
const uploadOnce = (relPath: string) => {
const abs = attachmentCandidates.get(relPath)!;
const attachmentId = v7();
@@ -85,16 +197,18 @@ export class ImportAttachmentService {
const apiFilePath = `/api/files/${attachmentId}/${fileNameWithExt}`;
attachmentTasks.push(() => this.uploadWithRetry({
abs,
storageFilePath,
attachmentId,
fileNameWithExt,
ext,
pageId,
fileTask,
uploadStats,
}));
attachmentTasks.push(() =>
this.uploadWithRetry({
abs,
storageFilePath,
attachmentId,
fileNameWithExt,
ext,
pageId,
fileTask,
uploadStats,
}),
);
return {
attachmentId,
@@ -121,195 +235,301 @@ export class ImportAttachmentService {
const pageDir = path.dirname(pageRelativePath);
const $ = load(html);
// image
for (const imgEl of $('img').toArray()) {
const $img = $(imgEl);
const src = cleanUrlString($img.attr('src') ?? '')!;
if (!src || src.startsWith('http')) continue;
// Cache for resolved paths to avoid repeated lookups
const resolvedPathCache = new Map<string, string | null>();
const relPath = resolveRelativeAttachmentPath(
src,
const getCachedResolvedPath = (rawPath: string): string | null => {
if (resolvedPathCache.has(rawPath)) {
return resolvedPathCache.get(rawPath)!;
}
const resolved = resolveRelativeAttachmentPath(
rawPath,
pageDir,
attachmentCandidates,
);
if (!relPath) continue;
resolvedPathCache.set(rawPath, resolved);
return resolved;
};
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await fs.stat(abs);
// Cache for file stats to avoid repeated file system calls
const statCache = new Map<string, any>();
const width = $img.attr('width') ?? '100%';
const align = $img.attr('data-align') ?? 'center';
const getCachedStat = async (absPath: string) => {
if (statCache.has(absPath)) {
return statCache.get(absPath);
}
const stat = await fs.stat(absPath);
statCache.set(absPath, stat);
return stat;
};
$img
.attr('src', apiFilePath)
.attr('data-attachment-id', attachmentId)
.attr('data-size', stat.size.toString())
.attr('width', width)
.attr('data-align', align);
// Single DOM traversal for all attachment elements
const selector =
'img, video, div[data-type="attachment"], a, div[data-type="excalidraw"], div[data-type="drawio"]';
const elements = $(selector).toArray();
unwrapFromParagraph($, $img);
}
for (const element of elements) {
const $el = $(element);
const tagName = element.tagName.toLowerCase();
// video
for (const vidEl of $('video').toArray()) {
const $vid = $(vidEl);
const src = cleanUrlString($vid.attr('src') ?? '')!;
if (!src || src.startsWith('http')) continue;
// Process based on element type
if (tagName === 'img') {
const src = cleanUrlString($el.attr('src') ?? '');
if (!src || src.startsWith('http')) continue;
const relPath = resolveRelativeAttachmentPath(
src,
pageDir,
attachmentCandidates,
);
if (!relPath) continue;
const relPath = getCachedResolvedPath(src);
if (!relPath) continue;
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await fs.stat(abs);
// Check if this image is part of a Draw.io pair
const drawioSvg = drawioSvgMap.get(relPath);
if (drawioSvg) {
const $drawio = $('<div>')
.attr('data-type', 'drawio')
.attr('data-src', drawioSvg.apiFilePath)
.attr('data-title', 'diagram')
.attr('data-width', '100%')
.attr('data-align', 'center')
.attr('data-attachment-id', drawioSvg.attachmentId);
const width = $vid.attr('width') ?? '100%';
const align = $vid.attr('data-align') ?? 'center';
$el.replaceWith($drawio);
unwrapFromParagraph($, $drawio);
continue;
}
$vid
.attr('src', apiFilePath)
.attr('data-attachment-id', attachmentId)
.attr('data-size', stat.size.toString())
.attr('width', width)
.attr('data-align', align);
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await getCachedStat(abs);
unwrapFromParagraph($, $vid);
}
// <div data-type="attachment">
for (const el of $('div[data-type="attachment"]').toArray()) {
const $oldDiv = $(el);
const rawUrl = cleanUrlString($oldDiv.attr('data-attachment-url') ?? '')!;
if (!rawUrl || rawUrl.startsWith('http')) continue;
const relPath = resolveRelativeAttachmentPath(
rawUrl,
pageDir,
attachmentCandidates,
);
if (!relPath) continue;
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await fs.stat(abs);
const fileName = path.basename(abs);
const mime = getMimeType(abs);
const $newDiv = $('<div>')
.attr('data-type', 'attachment')
.attr('data-attachment-url', apiFilePath)
.attr('data-attachment-name', fileName)
.attr('data-attachment-mime', mime)
.attr('data-attachment-size', stat.size.toString())
.attr('data-attachment-id', attachmentId);
$oldDiv.replaceWith($newDiv);
unwrapFromParagraph($, $newDiv);
}
// rewrite other attachments via <a>
for (const aEl of $('a').toArray()) {
const $a = $(aEl);
const href = cleanUrlString($a.attr('href') ?? '')!;
if (!href || href.startsWith('http')) continue;
const relPath = resolveRelativeAttachmentPath(
href,
pageDir,
attachmentCandidates,
);
if (!relPath) continue;
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await fs.stat(abs);
const ext = path.extname(relPath).toLowerCase();
if (ext === '.mp4') {
const $video = $('<video>')
$el
.attr('src', apiFilePath)
.attr('data-attachment-id', attachmentId)
.attr('data-size', stat.size.toString())
.attr('width', '100%')
.attr('data-align', 'center');
$a.replaceWith($video);
unwrapFromParagraph($, $video);
} else {
const confAliasName = $a.attr('data-linked-resource-default-alias');
let attachmentName = path.basename(abs);
if (confAliasName) attachmentName = confAliasName;
.attr('width', $el.attr('width') ?? '100%')
.attr('data-align', $el.attr('data-align') ?? 'center');
const $div = $('<div>')
.attr('data-type', 'attachment')
.attr('data-attachment-url', apiFilePath)
.attr('data-attachment-name', attachmentName)
.attr('data-attachment-mime', getMimeType(abs))
.attr('data-attachment-size', stat.size.toString())
.attr('data-attachment-id', attachmentId);
unwrapFromParagraph($, $el);
} else if (tagName === 'video') {
const src = cleanUrlString($el.attr('src') ?? '');
if (!src || src.startsWith('http')) continue;
$a.replaceWith($div);
unwrapFromParagraph($, $div);
}
}
// excalidraw and drawio
for (const type of ['excalidraw', 'drawio'] as const) {
for (const el of $(`div[data-type="${type}"]`).toArray()) {
const $oldDiv = $(el);
const rawSrc = cleanUrlString($oldDiv.attr('data-src') ?? '')!;
if (!rawSrc || rawSrc.startsWith('http')) continue;
const relPath = resolveRelativeAttachmentPath(
rawSrc,
pageDir,
attachmentCandidates,
);
const relPath = getCachedResolvedPath(src);
if (!relPath) continue;
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await fs.stat(abs);
const fileName = path.basename(abs);
const stat = await getCachedStat(abs);
const width = $oldDiv.attr('data-width') || '100%';
const align = $oldDiv.attr('data-align') || 'center';
const $newDiv = $('<div>')
.attr('data-type', type)
.attr('data-src', apiFilePath)
.attr('data-title', fileName)
.attr('data-width', width)
$el
.attr('src', apiFilePath)
.attr('data-attachment-id', attachmentId)
.attr('data-size', stat.size.toString())
.attr('data-align', align)
.attr('width', $el.attr('width') ?? '100%')
.attr('data-align', $el.attr('data-align') ?? 'center');
unwrapFromParagraph($, $el);
} else if (tagName === 'div') {
const dataType = $el.attr('data-type');
if (dataType === 'attachment') {
const rawUrl = cleanUrlString($el.attr('data-attachment-url') ?? '');
if (!rawUrl || rawUrl.startsWith('http')) continue;
const relPath = getCachedResolvedPath(rawUrl);
if (!relPath) continue;
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await getCachedStat(abs);
const fileName = path.basename(abs);
const mime = getMimeType(abs);
const $newDiv = $('<div>')
.attr('data-type', 'attachment')
.attr('data-attachment-url', apiFilePath)
.attr('data-attachment-name', fileName)
.attr('data-attachment-mime', mime)
.attr('data-attachment-size', stat.size.toString())
.attr('data-attachment-id', attachmentId);
$el.replaceWith($newDiv);
unwrapFromParagraph($, $newDiv);
} else if (dataType === 'excalidraw' || dataType === 'drawio') {
const rawSrc = cleanUrlString($el.attr('data-src') ?? '');
if (!rawSrc || rawSrc.startsWith('http')) continue;
const relPath = getCachedResolvedPath(rawSrc);
if (!relPath) continue;
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await getCachedStat(abs);
const fileName = path.basename(abs);
const $newDiv = $('<div>')
.attr('data-type', dataType)
.attr('data-src', apiFilePath)
.attr('data-title', fileName)
.attr('data-width', $el.attr('data-width') || '100%')
.attr('data-size', stat.size.toString())
.attr('data-align', $el.attr('data-align') || 'center')
.attr('data-attachment-id', attachmentId);
$el.replaceWith($newDiv);
unwrapFromParagraph($, $newDiv);
}
} else if (tagName === 'a') {
const href = cleanUrlString($el.attr('href') ?? '');
if (!href || href.startsWith('http')) continue;
const relPath = getCachedResolvedPath(href);
if (!relPath) continue;
// Check if this is a Draw.io file
const drawioSvg = drawioSvgMap.get(relPath);
if (drawioSvg) {
const $drawio = $('<div>')
.attr('data-type', 'drawio')
.attr('data-src', drawioSvg.apiFilePath)
.attr('data-title', 'diagram')
.attr('data-width', '100%')
.attr('data-align', 'center')
.attr('data-attachment-id', drawioSvg.attachmentId);
$el.replaceWith($drawio);
unwrapFromParagraph($, $drawio);
continue;
}
// Skip files that should be ignored
if (skipFiles.has(relPath)) {
$el.remove();
continue;
}
const { attachmentId, apiFilePath, abs } = processFile(relPath);
const stat = await getCachedStat(abs);
const ext = path.extname(relPath).toLowerCase();
if (ext === '.mp4') {
const $video = $('<video>')
.attr('src', apiFilePath)
.attr('data-attachment-id', attachmentId)
.attr('data-size', stat.size.toString())
.attr('width', '100%')
.attr('data-align', 'center');
$el.replaceWith($video);
unwrapFromParagraph($, $video);
} else {
const confAliasName = $el.attr('data-linked-resource-default-alias');
let attachmentName = path.basename(abs);
if (confAliasName) attachmentName = confAliasName;
const $div = $('<div>')
.attr('data-type', 'attachment')
.attr('data-attachment-url', apiFilePath)
.attr('data-attachment-name', attachmentName)
.attr('data-attachment-mime', getMimeType(abs))
.attr('data-attachment-size', stat.size.toString())
.attr('data-attachment-id', attachmentId);
$el.replaceWith($div);
unwrapFromParagraph($, $div);
}
}
}
// Collect all attachment IDs in the HTML in a single DOM traversal - O(n)
const usedAttachmentIds = new Set<string>();
$.root()
.find('[data-attachment-id]')
.each((_, el) => {
const attachmentId = $(el).attr('data-attachment-id');
if (attachmentId) {
usedAttachmentIds.add(attachmentId);
}
});
// Add Draw.io diagrams that weren't referenced in the HTML content
for (const [drawioHref, pair] of drawioPairs) {
const drawioSvg = drawioSvgMap.get(drawioHref);
if (!drawioSvg) continue;
if (usedAttachmentIds.has(drawioSvg.attachmentId)) {
continue; // Already in content
}
const $drawio = $('<div>')
.attr('data-type', 'drawio')
.attr('data-src', drawioSvg.apiFilePath)
.attr('data-title', 'diagram')
.attr('data-width', '100%')
.attr('data-align', 'center')
.attr('data-attachment-id', drawioSvg.attachmentId);
$.root().append($drawio);
}
// Process attachments from the attachment section that weren't referenced in HTML
// These need to be added as attachment nodes so they get uploaded
for (const attachment of pageAttachments) {
const { href, fileName, mimeType } = attachment;
// Skip temporary files or files that should be ignored
if (skipFiles.has(href)) {
continue;
}
// Check if this was part of a Draw.io pair that was already handled
if (drawioSvgMap.has(href)) {
continue;
}
// Check if already processed (was referenced in HTML)
if (processed.has(href)) {
continue;
}
// Skip if the file doesn't exist
if (!attachmentCandidates.has(href)) {
continue;
}
// This attachment was in the list but not referenced in HTML - add it
const { attachmentId, apiFilePath, abs } = processFile(href);
try {
const stat = await fs.stat(abs);
const mime = mimeType || getMimeType(abs);
// Add as attachment node at the end
const $attachmentDiv = $('<div>')
.attr('data-type', 'attachment')
.attr('data-attachment-url', apiFilePath)
.attr('data-attachment-name', fileName)
.attr('data-attachment-mime', mime)
.attr('data-attachment-size', stat.size.toString())
.attr('data-attachment-id', attachmentId);
$oldDiv.replaceWith($newDiv);
unwrapFromParagraph($, $newDiv);
$.root().append($attachmentDiv);
} catch (error) {
this.logger.error(`Failed to process attachment ${fileName}:`, error);
}
}
// wait for all uploads & DB inserts
uploadStats.total = attachmentTasks.length;
if (uploadStats.total > 0) {
this.logger.debug(`Starting upload of ${uploadStats.total} attachments...`);
try {
await Promise.all(
attachmentTasks.map(task => limit(task))
);
await Promise.all(attachmentTasks.map((task) => limit(task)));
} catch (err) {
this.logger.error('Import attachment upload error', err);
}
this.logger.debug(
`Upload completed: ${uploadStats.completed}/${uploadStats.total} successful, ${uploadStats.failed} failed`
`Upload completed: ${uploadStats.completed}/${uploadStats.total} successful, ${uploadStats.failed} failed`,
);
if (uploadStats.failed > 0) {
this.logger.warn(
`Failed to upload ${uploadStats.failed} files:`,
uploadStats.failedFiles
uploadStats.failedFiles,
);
}
}
@@ -317,6 +537,214 @@ export class ImportAttachmentService {
return $.root().html() || '';
}
/**
 * Scans a page's attachment list to pair Draw.io diagram sources with their
 * rendered PNG previews, and to decide which files must NOT be imported as
 * plain attachments.
 *
 * @param attachments all attachments declared for the page.
 * @returns
 *  - drawioPairs: keyed by the Draw.io source file's href; each pair holds
 *    the diagram source, its matching PNG preview (if one was found) and the
 *    shared base name.
 *  - skipFiles: hrefs excluded from regular attachment processing (temp
 *    files, Draw.io sources, and PNG previews consumed by a pair).
 */
private analyzeAttachments(attachments: AttachmentInfo[]): {
  drawioPairs: Map<string, DrawioPair>;
  skipFiles: Set<string>;
} {
  const drawioPairs = new Map<string, DrawioPair>();
  const skipFiles = new Set<string>();

  // Group attachments by type.
  const drawioFiles: AttachmentInfo[] = [];
  // PNGs indexed by base name (file name minus ".png" / ".drawio.png") so
  // each Draw.io source can look up its preview candidates in O(1).
  const pngByBaseName = new Map<string, AttachmentInfo[]>();
  // Extensions that can never be a Draw.io source, even when the export
  // labels them with the mxfile MIME type.
  const nonDrawioExtensions = new Set([
    '.png',
    '.jpg',
    '.jpeg',
    '.gif',
    '.svg',
    '.txt',
    '.pdf',
    '.doc',
    '.docx',
    '.xls',
    '.xlsx',
    '.csv',
    '.zip',
    '.tar',
    '.gz',
  ]);

  // Single pass through attachments.
  for (const attachment of attachments) {
    const { fileName, mimeType, href } = attachment;
    const fileNameLower = fileName.toLowerCase();

    // Skip temporary files.
    if (fileName.endsWith('.tmp') || fileName.includes('~drawio~')) {
      skipFiles.add(href);
      continue;
    }

    // Check for Draw.io files.
    if (mimeType === 'application/vnd.jgraph.mxfile') {
      // NOTE: when the name has no dot, lastIndexOf returns -1 and
      // substring(-1) yields the whole name — it won't be in the extension
      // set, so extension-less Confluence Server Draw.io files are kept.
      const ext = fileNameLower.substring(fileNameLower.lastIndexOf('.'));
      if (!nonDrawioExtensions.has(ext)) {
        drawioFiles.push(attachment);
      } else {
        // Skipped non-Draw.io file with mxfile MIME.
      }
    }

    // Index every PNG under its base name so it can be matched as a
    // Draw.io preview below. A PNG may also be a normal image; it is only
    // skipped if a pair actually claims it.
    if (mimeType === 'image/png' || fileNameLower.endsWith('.png')) {
      const baseNames: string[] = [];
      if (fileName.endsWith('.drawio.png')) {
        // Cloud format: "name.drawio.png" -> base is "name"
        baseNames.push(fileName.slice(0, -11)); // Remove .drawio.png
      } else if (fileName.endsWith('.png')) {
        // Server format: "name.png" -> base is "name"
        baseNames.push(fileName.slice(0, -4)); // Remove .png
      }
      for (const baseName of baseNames) {
        if (!pngByBaseName.has(baseName)) {
          pngByBaseName.set(baseName, []);
        }
        pngByBaseName.get(baseName)!.push(attachment);
      }
    }
  }

  // Match Draw.io files with PNG counterparts.
  for (const drawio of drawioFiles) {
    let baseName: string;
    if (drawio.fileName.endsWith('.drawio')) {
      baseName = drawio.fileName.slice(0, -7); // Remove .drawio
    } else {
      // Confluence Server: no extension
      baseName = drawio.fileName;
    }

    const candidatePngs = pngByBaseName.get(baseName) || [];
    let matchingPng: AttachmentInfo | undefined;

    // Extract the attachment ID from the Draw.io href.
    // Format: attachments/16941088/36044817.png -> ID is 36044817
    const drawioIdMatch = drawio.href.match(/\/(\d+)\.\w+$/);
    const drawioId = drawioIdMatch ? drawioIdMatch[1] : null;

    if (drawioId) {
      // Look for PNG with adjacent ID (usually PNG ID = Draw.io ID + small increment).
      // In Confluence, related files often have sequential or near-sequential IDs.
      for (const png of candidatePngs) {
        const pngIdMatch = png.href.match(/\/(\d+)\.png$/);
        const pngId = pngIdMatch ? pngIdMatch[1] : null;
        //TODO: should revisit this
        // but seem to be the best option for now
        // to prevent reusing the first drawio preview image if there are more with the same name
        // (drawioId is already known truthy here; the extra check is defensive)
        if (pngId && drawioId) {
          const idDiff = Math.abs(parseInt(pngId) - parseInt(drawioId));
          // PNG is usually within ~30 IDs of the Draw.io file.
          if (idDiff <= 30) {
            // Verify filename match.
            if (
              png.fileName === `${baseName}.drawio.png` ||
              (!drawio.fileName.endsWith('.drawio') &&
                png.fileName === `${baseName}.png`)
            ) {
              matchingPng = png;
              break;
            }
          }
        }
      }
    }

    // Fallback to name-only matching if ID-based matching fails.
    if (!matchingPng) {
      for (const png of candidatePngs) {
        if (png.fileName === `${baseName}.drawio.png`) {
          matchingPng = png;
          break;
        }
        if (
          !drawio.fileName.endsWith('.drawio') &&
          png.fileName === `${baseName}.png`
        ) {
          matchingPng = png;
          break;
        }
      }
    }

    if (matchingPng) {
      this.logger.debug(
        `Found Draw.io pair: ${drawio.fileName} -> ${matchingPng.fileName}`,
      );
    } else {
      this.logger.debug(`No PNG found for Draw.io file: ${drawio.fileName}`);
    }

    // A pair is recorded even without a preview PNG; the SVG builder will
    // then emit an empty canvas that still carries the diagram data.
    const pair: DrawioPair = {
      drawioFile: drawio,
      pngFile: matchingPng,
      baseName,
    };
    drawioPairs.set(drawio.href, pair);
    skipFiles.add(drawio.href);
    if (matchingPng) {
      skipFiles.add(matchingPng.href);
      // Remove the matched PNG from the candidates to prevent reuse when
      // several diagrams share the same base name (mutates the map the
      // outer loop reads, so matching is order-dependent by design).
      const remainingPngs = pngByBaseName
        .get(baseName)
        ?.filter((png) => png.href !== matchingPng.href);
      if (remainingPngs && remainingPngs.length > 0) {
        pngByBaseName.set(baseName, remainingPngs);
      } else {
        pngByBaseName.delete(baseName);
      }
    }
  }

  return { drawioPairs, skipFiles };
}
/**
 * Builds an SVG wrapper for a Draw.io diagram: the raw diagram XML is
 * base64-encoded into the SVG's `content` attribute (so the editor can
 * recover it), and the rendered PNG preview — when available — is inlined
 * as a data-URI image.
 *
 * @param drawioPath absolute path of the Draw.io source file.
 * @param pngPath optional absolute path of the rendered PNG preview.
 * @returns the SVG document as a UTF-8 Buffer.
 * @throws re-throws any failure to read the Draw.io source itself; a
 *         missing/unreadable PNG is only logged and the preview is omitted.
 */
private async createDrawioSvg(
  drawioPath: string,
  pngPath?: string,
): Promise<Buffer> {
  try {
    // Base64-encode the raw diagram XML so it is attribute-safe.
    const diagramXml = await fs.readFile(drawioPath, 'utf-8');
    const encodedDiagram = Buffer.from(diagramXml).toString('base64');

    // Inline the PNG preview when one exists; a read failure downgrades to
    // an SVG without preview rather than failing the whole diagram.
    let previewMarkup = '';
    if (pngPath) {
      try {
        const pngBytes = await fs.readFile(pngPath);
        previewMarkup = `<image href="data:image/png;base64,${pngBytes.toString('base64')}" width="100%" height="100%"/>`;
      } catch (error) {
        this.logger.warn(
          `Could not read PNG file for Draw.io diagram: ${pngPath}`,
          error,
        );
      }
    }

    // Create the SVG with embedded Draw.io data and image.
    // 600x400 is the default canvas when no preview image is provided.
    const svgDocument = `<?xml version="1.0" encoding="UTF-8"?>
<svg xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink"
width="600"
height="400"
viewBox="0 0 600 400"
content="${encodedDiagram}">${previewMarkup}</svg>`;
    return Buffer.from(svgDocument, 'utf-8');
  } catch (error) {
    this.logger.error(`Failed to create Draw.io SVG: ${error}`);
    throw error;
  }
}
private async uploadWithRetry(opts: {
abs: string;
storageFilePath: string;
@@ -344,7 +772,7 @@ export class ImportAttachmentService {
} = opts;
let lastError: Error;
for (let attempt = 1; attempt <= this.MAX_RETRIES; attempt++) {
try {
const fileStream = createReadStream(abs);
@@ -367,35 +795,65 @@ export class ImportAttachmentService {
spaceId: fileTask.spaceId,
})
.execute();
// Queue PDF and DOCX files for indexing
const supportedExtensions = ['.pdf', '.docx'];
if (supportedExtensions.includes(ext.toLowerCase())) {
try {
await this.attachmentQueue.add(
QueueJob.ATTACHMENT_INDEX_CONTENT,
{ attachmentId },
{
attempts: 1,
backoff: {
type: 'exponential',
delay: 30 * 1000,
},
deduplication: {
id: attachmentId,
},
removeOnComplete: true,
removeOnFail: false,
},
);
this.logger.debug(
`Queued ${fileNameWithExt} for indexing (attachment ID: ${attachmentId})`,
);
} catch (err) {
this.logger.error(
`Failed to queue indexing for imported attachment ${attachmentId}: ${err}`,
);
}
}
uploadStats.completed++;
if (uploadStats.completed % 10 === 0) {
this.logger.debug(
`Upload progress: ${uploadStats.completed}/${uploadStats.total}`
`Upload progress: ${uploadStats.completed}/${uploadStats.total}`,
);
}
return;
} catch (error) {
lastError = error as Error;
this.logger.warn(
`Upload attempt ${attempt}/${this.MAX_RETRIES} failed for ${fileNameWithExt}: ${error instanceof Error ? error.message : String(error)}`
`Upload attempt ${attempt}/${this.MAX_RETRIES} failed for ${fileNameWithExt}: ${error instanceof Error ? error.message : String(error)}`,
);
if (attempt < this.MAX_RETRIES) {
await new Promise(resolve =>
setTimeout(resolve, this.RETRY_DELAY * attempt)
await new Promise((resolve) =>
setTimeout(resolve, this.RETRY_DELAY * attempt),
);
}
}
}
uploadStats.failed++;
uploadStats.failedFiles.push(fileNameWithExt);
this.logger.error(
`Failed to upload ${fileNameWithExt} after ${this.MAX_RETRIES} attempts:`,
lastError
lastError,
);
}
}
@@ -178,7 +178,7 @@ export class ImportService {
.selectFrom('pages')
.select(['id', 'position'])
.where('spaceId', '=', spaceId)
.orderBy('position', 'desc')
.orderBy('position', (ob) => ob.collate('C').desc())
.limit(1)
.where('parentPageId', 'is', null)
.executeTakeFirst();
@@ -9,6 +9,8 @@ export enum QueueName {
export enum QueueJob {
SEND_EMAIL = 'send-email',
DELETE_SPACE_ATTACHMENTS = 'delete-space-attachments',
ATTACHMENT_INDEX_CONTENT = 'attachment-index-content',
ATTACHMENT_INDEXING = 'attachment-indexing',
DELETE_PAGE_ATTACHMENTS = 'delete-page-attachments',
PAGE_CONTENT_UPDATE = 'page-content-update',
@@ -40,8 +40,11 @@ export class LocalDriver implements StorageDriver {
async copy(fromFilePath: string, toFilePath: string): Promise<void> {
try {
const fromFullPath = this._fullPath(fromFilePath);
const toFullPath = this._fullPath(toFilePath);
if (await this.exists(fromFilePath)) {
await fs.copy(fromFilePath, toFilePath);
await fs.copy(fromFullPath, toFullPath);
}
} catch (err) {
throw new Error(`Failed to copy file: ${(err as Error).message}`);