fix(server): close duckdb resources on load failure, dedupe concurrent loads, drop unused cells projection
@@ -40,6 +40,7 @@ export class BaseQueryCacheService
 {
   private readonly logger = new Logger(BaseQueryCacheService.name);
   private readonly collections = new Map<string, LoadedCollection>();
+  private readonly inFlightLoads = new Map<string, Promise<LoadedCollection>>();

   constructor(
     private readonly configProvider: QueryCacheConfigProvider,
@@ -168,6 +169,8 @@ export class BaseQueryCacheService
     baseId: string,
     workspaceId: string,
   ): Promise<LoadedCollection> {
+    // TODO(task-7): remove per-request findById once pub/sub invalidation
+    // keeps collections in sync with schema bumps.
     const existing = this.collections.get(baseId);

     const base = await this.baseRepo.findById(baseId);
@@ -186,14 +189,24 @@ export class BaseQueryCacheService
       this.collections.delete(baseId);
     }

+    const inFlight = this.inFlightLoads.get(baseId);
+    if (inFlight) return inFlight;
+
+    const promise = (async () => {
+      try {
         const { maxCollections } = this.configProvider.config;
         if (this.collections.size >= maxCollections) {
           this.evictLru();
         }

         const loaded = await this.collectionLoader.load(baseId, workspaceId);
         this.collections.set(baseId, loaded);
         return loaded;
+      } finally {
+        this.inFlightLoads.delete(baseId);
+      }
+    })();
+    this.inFlightLoads.set(baseId, promise);
+    return promise;
   }

   private evictLru(): void {
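The dedupe logic above is the standard single-flight pattern. A minimal standalone sketch of the same idea, with illustrative names (`loadOnce`, `loadFn`) that are not from the commit:

```ts
// Concurrent callers for the same key share one in-flight promise; the map
// entry is cleared in `finally` so a failed load can be retried later.
const inFlight = new Map<string, Promise<unknown>>();

function loadOnce<T>(key: string, loadFn: () => Promise<T>): Promise<T> {
  const existing = inFlight.get(key);
  if (existing) return existing as Promise<T>;

  const promise = (async () => {
    try {
      return await loadFn();
    } finally {
      // Clear on success *and* failure: a rejected promise left in the map
      // would poison every future caller for this key.
      inFlight.delete(key);
    }
  })();

  inFlight.set(key, promise);
  return promise;
}
```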
@@ -228,10 +241,10 @@ export class BaseQueryCacheService
 }

 // Convert a DuckDB row object back into the BaseRow JSON shape. The builder
-// projects `cells` as a json_object keyed by property id; typed columns
-// (DOUBLE, BOOLEAN, TIMESTAMPTZ) round-trip as JS primitives / Date objects.
-// We reconstruct `cells` directly from the per-property columns so the JSON
-// payload matches what Postgres returns.
+// projects one column per user property; typed columns (DOUBLE, BOOLEAN,
+// TIMESTAMPTZ) round-trip as JS primitives / Date objects. We reconstruct
+// `cells` directly from the per-property columns so the JSON payload matches
+// what Postgres returns.
 function shapeBaseRow(
   raw: Record<string, unknown>,
   specs: ColumnSpec[],
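For context, a hedged sketch of how a `shapeBaseRow` along these lines could rebuild `cells` from per-property columns. The `ColumnSpecSketch` shape and the `propertyId` field are assumptions for illustration, not the repo's actual types:

```ts
// Hedged sketch only: the real ColumnSpec and system-column list live in the
// repo; `propertyId` is an assumed name for the key used inside `cells`.
interface ColumnSpecSketch {
  column: string;     // physical DuckDB column name, e.g. "p_title"
  propertyId: string; // key the property uses inside the `cells` JSON object
}

function shapeBaseRowSketch(
  raw: Record<string, unknown>,
  specs: ColumnSpecSketch[],
): Record<string, unknown> {
  const cells: Record<string, unknown> = {};
  for (const spec of specs) {
    // DOUBLE / BOOLEAN / TIMESTAMPTZ columns arrive as JS primitives or Date
    // objects, so they go into `cells` without a JSON round-trip.
    cells[spec.propertyId] = raw[spec.column] ?? null;
  }
  // System columns pass through unchanged; `cells` is rebuilt per property.
  return { id: raw.id, base_id: raw.base_id, cells };
}
```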
@@ -38,13 +38,16 @@ export class CollectionLoader {

     const instance = await DuckDBInstance.create(':memory:');
     const connection = await instance.connect();
+    let appender: Awaited<ReturnType<typeof connection.createAppender>> | null =
+      null;

+    try {
       const ddl = `CREATE TABLE rows (${specs
         .map((s) => `${quoteIdent(s.column)} ${s.ddlType}`)
         .join(', ')}, PRIMARY KEY (${quoteIdent('id')}))`;
       await connection.run(ddl);

-      const appender = await connection.createAppender('rows');
+      appender = await connection.createAppender('rows');

       let rowCount = 0;
       for await (const chunk of this.baseRowRepo.streamByBaseId(baseId, {
@@ -100,6 +103,7 @@ export class CollectionLoader {
       }
       appender.flushSync();
       appender.closeSync();
+      appender = null;

       for (const spec of specs) {
         if (!spec.indexable) continue;
@@ -121,6 +125,26 @@ export class CollectionLoader {
         connection,
         lastAccessedAt: Date.now(),
       };
+    } catch (err) {
+      if (appender) {
+        try {
+          appender.closeSync();
+        } catch {
+          // swallow — best-effort cleanup
+        }
+      }
+      try {
+        connection.closeSync();
+      } catch {
+        // swallow — best-effort cleanup
+      }
+      try {
+        instance.closeSync();
+      } catch {
+        // swallow — best-effort cleanup
+      }
+      throw err;
+    }
   }
 }

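Taken together, the three CollectionLoader hunks enforce one cleanup contract: on any failure during load, close whatever was opened (appender first, then connection, then instance), swallowing close errors so the original failure is what propagates. A condensed sketch, assuming the `@duckdb/node-api` client used above; `loadWithCleanup` and the trivial table body are hypothetical stand-ins:

```ts
import { DuckDBInstance } from '@duckdb/node-api'; // assumed import path

async function loadWithCleanup() {
  const instance = await DuckDBInstance.create(':memory:');
  const connection = await instance.connect();
  let appender: Awaited<ReturnType<typeof connection.createAppender>> | null =
    null;
  try {
    await connection.run('CREATE TABLE rows (id VARCHAR PRIMARY KEY)');
    appender = await connection.createAppender('rows');
    // ...append streamed rows here...
    appender.flushSync();
    appender.closeSync();
    appender = null; // already closed: keep the catch block from double-closing
    return { instance, connection }; // ownership passes to the cache on success
  } catch (err) {
    if (appender) {
      try { appender.closeSync(); } catch { /* best-effort */ }
    }
    try { connection.closeSync(); } catch { /* best-effort */ }
    try { instance.closeSync(); } catch { /* best-effort */ }
    throw err; // rethrow the original error, never a close failure
  }
}
```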
@@ -97,11 +97,9 @@ export function buildDuckDbListQuery(
 // --- select projection -------------------------------------------------

 function buildSelect(index: ColumnIndex, sortBuilds: SortBuild[]): string[] {
-  const cellsJson = buildCellsJson(index.userColumns);
   const parts: string[] = [
     'id',
     'base_id',
-    `${cellsJson} AS cells`,
     'position',
     'creator_id',
     'last_updated_by_id',
@@ -110,22 +108,15 @@ function buildSelect(index: ColumnIndex, sortBuilds: SortBuild[]): string[] {
     'updated_at',
     'deleted_at',
   ];
   for (const col of index.userColumns) {
     parts.push(quoteIdent(col.column));
   }
   for (const sb of sortBuilds) {
     parts.push(`${sb.expression} AS ${sb.key}`);
   }
   return parts;
 }

-function buildCellsJson(userColumns: ColumnSpec[]): string {
-  if (userColumns.length === 0) return `'{}'::JSON`;
-  const entries: string[] = [];
-  for (const col of userColumns) {
-    entries.push(`'${col.column}'`);
-    entries.push(quoteIdent(col.column));
-  }
-  return `json_object(${entries.join(', ')})`;
-}
-
 // --- filter ------------------------------------------------------------

 function buildFilter(
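To make the projection change concrete: with two hypothetical user columns, `buildSelect` now emits plain column references and no `cells` projection, since `shapeBaseRow` rebuilds `cells` in JS. Illustrative values only:

```ts
// p_title / p_due_at are made-up user columns for this example.
const parts = [
  'id',
  'base_id',
  // ...remaining system columns shown in the hunks above...
  '"p_title"',   // quoteIdent(col.column) for each user column
  '"p_due_at"',
  // plus one `expression AS key` entry per sort build
];
// Before this commit the list also carried a dead projection built by
// buildCellsJson:
//   json_object('p_title', "p_title", 'p_due_at', "p_due_at") AS cells
```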