chore(server): add per-request perf logs for base query cache diagnostics

This commit is contained in:
Philipinho
2026-04-19 22:44:39 +01:00
parent 02c3bdf028
commit 7e07d77510
4 changed files with 136 additions and 3 deletions
@@ -33,6 +33,7 @@ import {
ColumnSpec,
LoadedCollection,
} from './query-cache.types';
import { EnvironmentService } from '../../../integrations/environment/environment.service';
export type CacheListOpts = {
filter?: FilterNode;
@@ -55,6 +56,7 @@ export class BaseQueryCacheService
private readonly baseRepo: BaseRepo,
private readonly collectionLoader: CollectionLoader,
@Optional() private readonly redisService: RedisService | null = null,
@Optional() private readonly env: EnvironmentService | null = null,
) {}
async onApplicationBootstrap(): Promise<void> {
@@ -130,7 +132,12 @@ export class BaseQueryCacheService
workspaceId: string,
opts: CacheListOpts,
): Promise<CursorPaginationResult<BaseRow>> {
const debug = this.env?.getBaseQueryCacheDebug() ?? false;
const tStart = debug ? Date.now() : 0;
const tEnsure = debug ? Date.now() : 0;
const collection = await this.ensureLoaded(baseId, workspaceId);
const ensureMs = debug ? Date.now() - tEnsure : 0;
const sortBuilds: SortBuild[] =
opts.sorts && opts.sorts.length > 0
@@ -178,19 +185,37 @@ export class BaseQueryCacheService
}
}
const tExec = debug ? Date.now() : 0;
const reader = await prepared.runAndReadAll();
const duckRows = reader.getRowObjectsJS();
const execMs = debug ? Date.now() - tExec : 0;
const hasNextPage = duckRows.length > opts.pagination.limit;
if (hasNextPage) duckRows.pop();
if (duckRows.length === 0) {
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
phase: 'cache.list',
baseId: baseId.slice(0, 8),
totalMs: Date.now() - tStart,
ensureMs,
execMs,
shapeMs: 0,
rows: 0,
}),
);
}
return emptyCursorPaginationResult<BaseRow>(opts.pagination.limit);
}
const tShape = debug ? Date.now() : 0;
const items = duckRows.map((r) =>
shapeBaseRow(r, collection.columns, sortBuilds),
);
const shapeMs = debug ? Date.now() - tShape : 0;
const endRow = duckRows[duckRows.length - 1];
const startRow = duckRows[0];
@@ -209,6 +234,21 @@ export class BaseQueryCacheService
const nextCursor = hasNextPage ? encodeFromRow(endRow) : null;
const prevCursor = hasPrevPage ? encodeFromRow(startRow) : null;
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
phase: 'cache.list',
baseId: baseId.slice(0, 8),
totalMs: Date.now() - tStart,
ensureMs,
execMs,
shapeMs,
rows: items.length,
}),
);
}
return {
items,
meta: {
@@ -354,11 +394,14 @@ export class BaseQueryCacheService
baseId: string,
workspaceId: string,
): Promise<LoadedCollection> {
const debug = this.env?.getBaseQueryCacheDebug() ?? false;
// TODO(task-7): remove per-request findById once pub/sub invalidation
// keeps collections in sync with schema bumps.
const existing = this.collections.get(baseId);
const tFind = debug ? Date.now() : 0;
const base = await this.baseRepo.findById(baseId);
const findMs = debug ? Date.now() - tFind : 0;
if (!base) {
throw new Error(`Base ${baseId} not found`);
}
@@ -367,6 +410,16 @@ export class BaseQueryCacheService
if (existing && existing.schemaVersion === freshVersion) {
existing.lastAccessedAt = Date.now();
this.recordAccess(baseId);
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
phase: 'ensureLoaded.hit',
baseId: baseId.slice(0, 8),
findMs,
}),
);
}
return existing;
}
@@ -382,6 +435,7 @@ export class BaseQueryCacheService
return loaded;
}
const tLoad = debug ? Date.now() : 0;
const promise = (async () => {
try {
const { maxCollections } = this.configProvider.config;
@@ -397,7 +451,19 @@ export class BaseQueryCacheService
})();
this.inFlightLoads.set(baseId, promise);
const loaded = await promise;
const loadMs = debug ? Date.now() - tLoad : 0;
this.recordAccess(baseId);
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
phase: 'ensureLoaded.miss',
baseId: baseId.slice(0, 8),
findMs,
loadMs,
}),
);
}
return loaded;
}
@@ -1,7 +1,8 @@
import { Injectable } from '@nestjs/common';
import { Injectable, Optional } from '@nestjs/common';
import { QueryCacheConfigProvider } from './query-cache.config';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import type { FilterNode, SearchSpec, SortSpec } from '../engine';
import { EnvironmentService } from '../../../integrations/environment/environment.service';
export type RouteDecision = 'postgres' | 'cache';
@@ -18,6 +19,7 @@ export class BaseQueryRouter {
constructor(
private readonly configProvider: QueryCacheConfigProvider,
private readonly baseRowRepo: BaseRowRepo,
@Optional() private readonly env: EnvironmentService | null = null,
) {}
async decide(args: RouteDecideArgs): Promise<RouteDecision> {
@@ -32,9 +34,23 @@ export class BaseQueryRouter {
// v1: any search stays on Postgres — loader doesn't populate search_text yet.
if (hasSearch) return 'postgres';
const debug = this.env?.getBaseQueryCacheDebug() ?? false;
const tCount = debug ? Date.now() : 0;
const count = await this.baseRowRepo.countActiveRows(args.baseId, {
workspaceId: args.workspaceId,
});
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
phase: 'router.countActiveRows',
baseId: args.baseId.slice(0, 8),
countMs: Date.now() - tCount,
count,
minRows,
}),
);
}
if (count < minRows) return 'postgres';
return 'cache';
@@ -44,6 +44,7 @@ import {
BaseRowUpdatedEvent,
BaseRowsDeletedEvent,
} from '../events/base-events';
import { EnvironmentService } from '../../../integrations/environment/environment.service';
@Injectable()
export class BaseRowService {
@@ -57,6 +58,7 @@ export class BaseRowService {
private readonly eventEmitter: EventEmitter2,
private readonly queryRouter: BaseQueryRouter,
private readonly queryCache: BaseQueryCacheService,
private readonly env: EnvironmentService,
) {}
async create(userId: string, workspaceId: string, dto: CreateRowDto) {
@@ -197,6 +199,9 @@ export class BaseRowService {
pagination: PaginationOptions,
workspaceId: string,
) {
const debug = this.env.getBaseQueryCacheDebug();
const tStart = debug ? Date.now() : 0;
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
const schema: PropertySchema = new Map(
properties.map((p) => [p.id, p]),
@@ -209,6 +214,7 @@ export class BaseRowService {
direction: s.direction,
}));
const tRouter = debug ? Date.now() : 0;
const decision = await this.queryRouter.decide({
baseId: dto.baseId,
workspaceId,
@@ -216,26 +222,48 @@ export class BaseRowService {
sorts,
search,
});
const routerMs = debug ? Date.now() - tRouter : 0;
let resultPath: 'cache' | 'postgres' | 'fallback' = 'postgres';
if (decision === 'cache') {
try {
return await this.queryCache.list(dto.baseId, workspaceId, {
const tCache = debug ? Date.now() : 0;
const result = await this.queryCache.list(dto.baseId, workspaceId, {
filter,
sorts,
search,
schema,
pagination,
});
const cacheMs = debug ? Date.now() - tCache : 0;
resultPath = 'cache';
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
path: resultPath,
baseId: dto.baseId.slice(0, 8),
totalMs: Date.now() - tStart,
routerMs,
cacheMs,
rows: result.items.length,
}),
);
}
return result;
} catch (err) {
const error = err as Error;
this.logger.warn(
`Cache list failed for base ${dto.baseId}, falling back to Postgres: ${error.message}`,
);
if (error.stack) this.logger.warn(error.stack);
resultPath = 'fallback';
}
}
return this.baseRowRepo.list({
const tPg = debug ? Date.now() : 0;
const result = await this.baseRowRepo.list({
baseId: dto.baseId,
workspaceId,
filter,
@@ -244,6 +272,21 @@ export class BaseRowService {
schema,
pagination,
});
const pgMs = debug ? Date.now() - tPg : 0;
if (debug) {
console.log(
'[cache-perf]',
JSON.stringify({
path: resultPath,
baseId: dto.baseId.slice(0, 8),
totalMs: Date.now() - tStart,
routerMs,
pgMs,
rows: result.items.length,
}),
);
}
return result;
}
async reorder(dto: ReorderRowDto, workspaceId: string, userId?: string) {
@@ -335,4 +335,12 @@ export class EnvironmentService {
10,
);
}
getBaseQueryCacheDebug(): boolean {
  // Perf-logging toggle for the base query cache. Enabled only when the
  // BASE_QUERY_CACHE_DEBUG env var is the string 'true' (case-insensitive);
  // anything else — including unset, which falls back to 'false' — disables it.
  const raw = this.configService.get<string>('BASE_QUERY_CACHE_DEBUG', 'false');
  return raw.toLowerCase() === 'true';
}
}