Merge branch 'main' into tiptap3-migration

This commit is contained in:
Philipinho
2026-01-24 00:18:48 +00:00
20 changed files with 400 additions and 155 deletions
+2
View File
@@ -18,6 +18,7 @@ import { SecurityModule } from './integrations/security/security.module';
import { TelemetryModule } from './integrations/telemetry/telemetry.module';
import { RedisModule } from '@nestjs-labs/nestjs-ioredis';
import { RedisConfigService } from './integrations/redis/redis-config.service';
import { LoggerModule } from './common/logger/logger.module';
const enterpriseModules = [];
try {
@@ -35,6 +36,7 @@ try {
@Module({
imports: [
LoggerModule,
CoreModule,
DatabaseModule,
EnvironmentModule,
@@ -8,9 +8,11 @@ import { QueueModule } from '../../integrations/queue/queue.module';
import { EventEmitterModule } from '@nestjs/event-emitter';
import { HealthModule } from '../../integrations/health/health.module';
import { CollaborationController } from './collaboration.controller';
import { LoggerModule } from '../../common/logger/logger.module';
@Module({
imports: [
LoggerModule,
DatabaseModule,
EnvironmentModule,
CollaborationModule,
@@ -5,8 +5,8 @@ import {
NestFastifyApplication,
} from '@nestjs/platform-fastify';
import { TransformHttpResponseInterceptor } from '../../common/interceptors/http-response.interceptor';
import { InternalLogFilter } from '../../common/logger/internal-log-filter';
import { Logger } from '@nestjs/common';
import { Logger as PinoLogger } from 'nestjs-pino';
async function bootstrap() {
const app = await NestFactory.create<NestFastifyApplication>(
@@ -17,10 +17,12 @@ async function bootstrap() {
maxParamLength: 500,
}),
{
logger: new InternalLogFilter(),
bufferLogs: true,
},
);
app.useLogger(app.get(PinoLogger));
app.setGlobalPrefix('api', { exclude: ['/'] });
app.enableCors();
+36
View File
@@ -2,6 +2,7 @@ import * as path from 'path';
import * as bcrypt from 'bcrypt';
import { sanitize } from 'sanitize-filename-ts';
import { FastifyRequest } from 'fastify';
import { Readable, Transform } from 'stream';
export const envPath = path.resolve(process.cwd(), '..', '..', '.env');
@@ -98,3 +99,38 @@ export function hasLicenseOrEE(opts: {
const { licenseKey, plan, isCloud } = opts;
return Boolean(licenseKey) || (isCloud && plan === 'business');
}
/**
 * Rewrites a database connection URL so postgres.js accepts it.
 *
 * Two query parameters are stripped:
 * - `sslmode=no-verify` — not understood by postgres.js (all other sslmode
 *   values pass through untouched)
 * - `schema` — has no effect when supplied via connection string
 *
 * Note: If we don't strip them, the connection will fail
 *
 * @param url - the raw DATABASE_URL
 * @returns the URL with the unsupported parameters removed
 */
export function normalizePostgresUrl(url: string): string {
  const parsed = new URL(url);
  const kept = [...parsed.searchParams].filter(
    ([name, val]) =>
      name !== 'schema' && !(name === 'sslmode' && val === 'no-verify'),
  );
  parsed.search = new URLSearchParams(kept).toString();
  return parsed.toString();
}
/**
 * Wraps a readable source in a pass-through Transform that counts the bytes
 * flowing through it, so the total size is known once the stream has been
 * fully consumed (e.g. after streaming an upload to storage).
 *
 * @param source - the readable to count; it is piped into the returned stream
 * @returns the counting stream plus a getter for the bytes counted so far
 */
export function createByteCountingStream(source: Readable) {
  let bytesRead = 0;
  const stream = new Transform({
    transform(chunk, encoding, callback) {
      // Buffers report their byte length directly; string chunks (possible
      // with an objectMode / decodeStrings:false upstream) must be measured
      // in bytes, not UTF-16 code units.
      bytesRead +=
        typeof chunk === 'string'
          ? Buffer.byteLength(chunk, encoding)
          : chunk.length;
      callback(null, chunk);
    },
  });
  source.pipe(stream);
  // pipe() does not forward errors; destroy the transform so consumers see
  // the failure and the stream is torn down instead of hanging open.
  source.on('error', (err) => stream.destroy(err));
  return { stream, getBytesRead: () => bytesRead };
}
@@ -0,0 +1,9 @@
import { Module } from '@nestjs/common';
import { LoggerModule as PinoLoggerModule } from 'nestjs-pino';
import { createPinoConfig } from './pino.config';
/**
 * Application-wide logging module.
 *
 * Wraps nestjs-pino's LoggerModule, configured once via createPinoConfig(),
 * and re-exports it so importing modules can inject the pino logger.
 */
@Module({
imports: [PinoLoggerModule.forRoot(createPinoConfig())],
exports: [PinoLoggerModule],
})
export class LoggerModule {}
@@ -0,0 +1,77 @@
import { Params } from 'nestjs-pino';
import { stdTimeFunctions } from 'pino';
/**
 * NestJS bootstrap contexts whose entries are replaced with a marker payload
 * in production (see the `log` formatter below) to cut startup noise.
 */
const CONTEXTS_TO_IGNORE = [
  'InstanceLoader',
  'RoutesResolver',
  'RouterExplorer',
  'WebSocketsController',
];

/**
 * Builds the nestjs-pino options from environment variables.
 *
 * - NODE_ENV=production: 'info' level, no pretty transport, noisy bootstrap
 *   contexts replaced with `{ filtered: true }`
 * - DEBUG_MODE=true: forces 'debug' level even in production
 * - LOG_HTTP=true: enables per-request logging (health endpoints excluded)
 */
export function createPinoConfig(): Params {
  const prod = process.env.NODE_ENV === 'production';
  const debugMode = process.env.DEBUG_MODE === 'true';
  const httpLogging = process.env.LOG_HTTP === 'true';
  // "quiet" = production without an explicit debug override.
  const quiet = prod && !debugMode;

  return {
    pinoHttp: {
      level: quiet ? 'info' : 'debug',
      timestamp: stdTimeFunctions.isoTime,
      // Pretty, colorized single-line output outside production only.
      transport: prod
        ? undefined
        : {
            target: 'pino-pretty',
            options: {
              colorize: true,
              singleLine: true,
              translateTime: 'SYS:standard',
              ignore: 'pid,hostname',
            },
          },
      formatters: {
        // Emit the level name rather than pino's numeric level.
        level: (label) => ({ level: label }),
        log: (object: Record<string, unknown>) => {
          if (!quiet) {
            return object;
          }
          const context = object['context'] as string | undefined;
          // Replace noisy framework context entries with a minimal marker
          // payload instead of their full content.
          return context && CONTEXTS_TO_IGNORE.includes(context)
            ? { filtered: true }
            : object;
        },
      },
      serializers: {
        req: (req) => {
          const headers = req.headers ?? {};
          const forwarded = headers['x-forwarded-for'];
          // Client IP preference: Cloudflare header, then the first
          // x-forwarded-for hop, then the raw socket address.
          const firstHop =
            typeof forwarded === 'string'
              ? forwarded.split(',')[0]?.trim()
              : undefined;
          return {
            method: req.method,
            url: req.url,
            ip: headers['cf-connecting-ip'] || firstHop || req.remoteAddress,
            userAgent: headers['user-agent'],
          };
        },
        res: (res) => ({
          statusCode: res.statusCode,
        }),
      },
      // Map HTTP outcomes onto log levels: errors/5xx -> error, 4xx -> warn.
      customLogLevel: (_req, res, err) => {
        if (err || res.statusCode >= 500) return 'error';
        return res.statusCode >= 400 ? 'warn' : 'info';
      },
      autoLogging: httpLogging
        ? {
            ignore: (req) =>
              req.url === '/api/health' || req.url === '/api/health/live',
          }
        : false,
    },
  };
}
@@ -5,15 +5,17 @@ import { sanitizeFileName } from '../../common/helpers';
import * as sharp from 'sharp';
export interface PreparedFile {
buffer: Buffer;
buffer?: Buffer;
fileName: string;
fileSize: number;
fileExtension: string;
mimeType: string;
multiPartFile?: MultipartFile;
}
export async function prepareFile(
filePromise: Promise<MultipartFile>,
options: { skipBuffer?: boolean } = {},
): Promise<PreparedFile> {
const file = await filePromise;
@@ -22,10 +24,16 @@ export async function prepareFile(
}
try {
const buffer = await file.toBuffer();
let buffer: Buffer | undefined;
let fileSize = 0;
if (!options.skipBuffer) {
buffer = await file.toBuffer();
fileSize = buffer.length;
}
const sanitizedFilename = sanitizeFileName(file.filename);
const fileName = sanitizedFilename.slice(0, 255);
const fileSize = buffer.length;
const fileExtension = path.extname(file.filename).toLowerCase();
return {
@@ -34,6 +42,7 @@ export async function prepareFile(
fileSize,
fileExtension,
mimeType: file.mimetype,
multiPartFile: file,
};
} catch (error) {
throw error;
@@ -4,6 +4,7 @@ import {
Logger,
NotFoundException,
} from '@nestjs/common';
import { Readable } from 'stream';
import { StorageService } from '../../../integrations/storage/storage.service';
import { MultipartFile } from '@fastify/multipart';
import {
@@ -26,6 +27,7 @@ import { SpaceRepo } from '@docmost/db/repos/space/space.repo';
import { InjectQueue } from '@nestjs/bullmq';
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import { Queue } from 'bullmq';
import { createByteCountingStream } from '../../../common/helpers/utils';
@Injectable()
export class AttachmentService {
@@ -49,7 +51,9 @@ export class AttachmentService {
attachmentId?: string;
}) {
const { filePromise, pageId, spaceId, userId, workspaceId } = opts;
const preparedFile: PreparedFile = await prepareFile(filePromise);
const preparedFile: PreparedFile = await prepareFile(filePromise, {
skipBuffer: true,
});
let isUpdate = false;
let attachmentId = null;
@@ -81,7 +85,14 @@ export class AttachmentService {
const filePath = `${getAttachmentFolderPath(AttachmentType.File, workspaceId)}/${attachmentId}/${preparedFile.fileName}`;
await this.uploadToDrive(filePath, preparedFile.buffer);
const { stream, getBytesRead } = createByteCountingStream(
preparedFile.multiPartFile.file,
);
await this.uploadToDrive(filePath, stream);
// Update fileSize from the consumed stream
preparedFile.fileSize = getBytesRead();
let attachment: Attachment = null;
try {
@@ -142,7 +153,10 @@ export class AttachmentService {
const preparedFile: PreparedFile = await prepareFile(filePromise);
validateFileType(preparedFile.fileExtension, validImageExtensions);
const processedBuffer = await compressAndResizeIcon(preparedFile.buffer, type);
const processedBuffer = await compressAndResizeIcon(
preparedFile.buffer,
type,
);
preparedFile.buffer = processedBuffer;
preparedFile.fileSize = processedBuffer.length;
preparedFile.fileName = uuid4() + preparedFile.fileExtension;
@@ -232,9 +246,9 @@ export class AttachmentService {
}
}
async uploadToDrive(filePath: string, fileBuffer: any) {
async uploadToDrive(filePath: string, fileContent: Buffer | Readable) {
try {
await this.storageService.upload(filePath, fileBuffer);
await this.storageService.upload(filePath, fileContent);
} catch (err) {
this.logger.error('Error uploading file to drive:', err);
throw new BadRequestException('Error uploading file to drive');
+23 -20
View File
@@ -7,8 +7,7 @@ import {
} from '@nestjs/common';
import { InjectKysely, KyselyModule } from 'nestjs-kysely';
import { EnvironmentService } from '../integrations/environment/environment.service';
import { CamelCasePlugin, LogEvent, PostgresDialect, sql } from 'kysely';
import { Pool, types } from 'pg';
import { CamelCasePlugin, LogEvent, sql } from 'kysely';
import { GroupRepo } from '@docmost/db/repos/group/group.repo';
import { WorkspaceRepo } from '@docmost/db/repos/workspace/workspace.repo';
import { UserRepo } from '@docmost/db/repos/user/user.repo';
@@ -26,9 +25,9 @@ import { UserTokenRepo } from './repos/user-token/user-token.repo';
import { BacklinkRepo } from '@docmost/db/repos/backlink/backlink.repo';
import { ShareRepo } from '@docmost/db/repos/share/share.repo';
import { PageListener } from '@docmost/db/listeners/page.listener';
// https://github.com/brianc/node-postgres/issues/811
types.setTypeParser(types.builtins.INT8, (val) => Number(val));
import { PostgresJSDialect } from 'kysely-postgres-js';
import * as postgres from 'postgres';
import { normalizePostgresUrl } from '../common/helpers';
@Global()
@Module({
@@ -37,26 +36,30 @@ types.setTypeParser(types.builtins.INT8, (val) => Number(val));
imports: [],
inject: [EnvironmentService],
useFactory: (environmentService: EnvironmentService) => ({
dialect: new PostgresDialect({
pool: new Pool({
connectionString: environmentService.getDatabaseURL(),
max: environmentService.getDatabaseMaxPool(),
}).on('error', (err) => {
console.error('Database error:', err.message);
}),
dialect: new PostgresJSDialect({
postgres: postgres(
normalizePostgresUrl(environmentService.getDatabaseURL()),
{
max: environmentService.getDatabaseMaxPool(),
onnotice: () => {},
types: {
bigint: {
to: 20,
from: [20, 1700],
serialize: (value: number) => value.toString(),
parse: (value: string) => Number.parseInt(value),
},
},
},
),
}),
plugins: [new CamelCasePlugin()],
log: (event: LogEvent) => {
if (environmentService.getNodeEnv() !== 'development') return;
const logger = new Logger(DatabaseModule.name);
if (event.level) {
if (process.env.DEBUG_DB?.toLowerCase() === 'true') {
logger.debug(event.query.sql);
logger.debug('query time: ' + event.queryDurationMillis + ' ms');
//if (event.query.parameters.length > 0) {
// logger.debug('parameters: ' + event.query.parameters);
//}
}
if (process.env.DEBUG_DB?.toLowerCase() === 'true') {
logger.debug(event.query.sql);
logger.debug('query time: ' + event.queryDurationMillis + ' ms');
}
},
}),
+6 -12
View File
@@ -1,25 +1,19 @@
import * as path from 'path';
import { promises as fs } from 'fs';
import pg from 'pg';
import {
Kysely,
Migrator,
PostgresDialect,
FileMigrationProvider,
} from 'kysely';
import { Kysely, Migrator, FileMigrationProvider } from 'kysely';
import { run } from 'kysely-migration-cli';
import * as dotenv from 'dotenv';
import { envPath } from '../common/helpers/utils';
import { envPath, normalizePostgresUrl } from '../common/helpers';
import { PostgresJSDialect } from 'kysely-postgres-js';
import postgres from 'postgres';
dotenv.config({ path: envPath });
const migrationFolder = path.join(__dirname, './migrations');
const db = new Kysely<any>({
dialect: new PostgresDialect({
pool: new pg.Pool({
connectionString: process.env.DATABASE_URL,
}) as any,
dialect: new PostgresJSDialect({
postgres: postgres(normalizePostgresUrl(process.env.DATABASE_URL)),
}),
});
@@ -10,7 +10,11 @@ import {
} from '../../../collaboration/collaboration.util';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { generateSlugId, sanitizeFileName } from '../../../common/helpers';
import {
generateSlugId,
sanitizeFileName,
createByteCountingStream,
} from '../../../common/helpers';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { TiptapTransformer } from '@hocuspocus/transformer';
import * as Y from 'yjs';
@@ -173,15 +177,24 @@ export class ImportService {
};
}
async getNewPagePosition(spaceId: string): Promise<string> {
const lastPage = await this.db
async getNewPagePosition(
spaceId: string,
parentPageId?: string,
): Promise<string> {
let query = this.db
.selectFrom('pages')
.select(['id', 'position'])
.where('spaceId', '=', spaceId)
.orderBy('position', (ob) => ob.collate('C').desc())
.limit(1)
.where('parentPageId', 'is', null)
.executeTakeFirst();
.limit(1);
if (parentPageId) {
query = query.where('parentPageId', '=', parentPageId);
} else {
query = query.where('parentPageId', 'is', null);
}
const lastPage = await query.executeTakeFirst();
if (lastPage) {
return generateJitteredKeyBetween(lastPage.position, null);
@@ -198,20 +211,21 @@ export class ImportService {
workspaceId: string,
) {
const file = await filePromise;
const fileBuffer = await file.toBuffer();
const fileExtension = path.extname(file.filename).toLowerCase();
const fileName = sanitizeFileName(
path.basename(file.filename, fileExtension),
);
const fileSize = fileBuffer.length;
const fileNameWithExt = fileName + fileExtension;
const fileTaskId = uuid7();
const filePath = `${getFileTaskFolderPath(FileTaskType.Import, workspaceId)}/${fileTaskId}/${fileNameWithExt}`;
// upload file
await this.storageService.upload(filePath, fileBuffer);
const { stream, getBytesRead } = createByteCountingStream(file.file);
await this.storageService.upload(filePath, stream);
const fileSize = getBytesRead();
const fileTask = await this.db
.insertInto('fileTasks')
@@ -20,9 +20,15 @@ export class LocalDriver implements StorageDriver {
return join(this.config.storagePath, filePath);
}
async upload(filePath: string, file: Buffer): Promise<void> {
async upload(filePath: string, file: Buffer | Readable): Promise<void> {
try {
await fs.outputFile(this._fullPath(filePath), file);
const fullPath = this._fullPath(filePath);
if (file instanceof Buffer) {
await fs.outputFile(fullPath, file);
} else {
await fs.mkdir(dirname(fullPath), { recursive: true });
await pipeline(file, createWriteStream(fullPath));
}
} catch (err) {
throw new Error(`Failed to upload file: ${(err as Error).message}`);
}
@@ -42,7 +48,7 @@ export class LocalDriver implements StorageDriver {
try {
const fromFullPath = this._fullPath(fromFilePath);
const toFullPath = this._fullPath(toFilePath);
if (await this.exists(fromFilePath)) {
await fs.copy(fromFullPath, toFullPath);
}
@@ -23,19 +23,21 @@ export class S3Driver implements StorageDriver {
this.s3Client = new S3Client(config as any);
}
async upload(filePath: string, file: Buffer): Promise<void> {
async upload(filePath: string, file: Buffer | Readable): Promise<void> {
try {
const contentType = getMimeType(filePath);
const command = new PutObjectCommand({
Bucket: this.config.bucket,
Key: filePath,
Body: file,
ContentType: contentType,
// ACL: "public-read",
const upload = new Upload({
client: this.s3Client,
params: {
Bucket: this.config.bucket,
Key: filePath,
Body: file,
ContentType: contentType,
},
});
await this.s3Client.send(command);
await upload.done();
} catch (err) {
throw new Error(`Failed to upload file: ${(err as Error).message}`);
}
@@ -1,7 +1,7 @@
import { Readable } from 'stream';
export interface StorageDriver {
upload(filePath: string, file: Buffer): Promise<void>;
upload(filePath: string, file: Buffer | Readable): Promise<void>;
uploadStream(filePath: string, file: Readable, options?: { recreateClient?: boolean }): Promise<void>;
@@ -8,9 +8,9 @@ export class StorageService {
private readonly logger = new Logger(StorageService.name);
constructor(
@Inject(STORAGE_DRIVER_TOKEN) private storageDriver: StorageDriver,
) {}
) { }
async upload(filePath: string, fileContent: Buffer | any) {
async upload(filePath: string, fileContent: Buffer | Readable) {
await this.storageDriver.upload(filePath, fileContent);
this.logger.debug(`File uploaded successfully. Path: ${filePath}`);
}
+5 -5
View File
@@ -5,9 +5,9 @@ import {
NestFastifyApplication,
} from '@nestjs/platform-fastify';
import { Logger, NotFoundException, ValidationPipe } from '@nestjs/common';
import { Logger as PinoLogger } from 'nestjs-pino';
import { TransformHttpResponseInterceptor } from './common/interceptors/http-response.interceptor';
import { WsRedisIoAdapter } from './ws/adapter/ws-redis.adapter';
import { InternalLogFilter } from './common/logger/internal-log-filter';
import fastifyMultipart from '@fastify/multipart';
import fastifyCookie from '@fastify/cookie';
@@ -24,10 +24,12 @@ async function bootstrap() {
}),
{
rawBody: true,
logger: new InternalLogFilter(),
bufferLogs: true,
},
);
app.useLogger(app.get(PinoLogger));
app.setGlobalPrefix('api', {
exclude: ['robots.txt', 'share/:shareId/p/:pageSlug'],
});
@@ -99,9 +101,7 @@ async function bootstrap() {
const port = process.env.PORT || 3000;
await app.listen(port, '0.0.0.0', () => {
logger.log(
`Listening on http://127.0.0.1:${port} / ${process.env.APP_URL}`,
);
logger.log(`Listening on http://127.0.0.1:${port} / ${process.env.APP_URL}`);
});
}