feat(server): load bases into DuckDB and serve list queries from cache

- collection-loader streams base rows via postgres and bulk-inserts into an
  in-memory DuckDB instance using the Appender API, then builds an index on
  each indexable column
- base-query-cache service routes list() calls through the prepared-statement
  path; ensureLoaded does schema-version checks with single-pass LRU eviction
- keyset param-ordering bug in the DuckDB builder fixed: placeholders appear
  head-to-tail but were being pushed tail-to-head, which made DuckDB bind the
  wrong value for each ? and throw Binder Error on typed columns
- base-row repo gains countActiveRows for the router to use in task 6
- seed script split into an importable helper so integration tests can seed a
  10k-row base deterministically without shelling out
- new integration spec compares Postgres vs DuckDB pagination end-to-end for
  a numeric sort and guards against duplicate rows from DuckDB

Integration test is skipped unless INTEGRATION_DB_URL is set.
This commit is contained in:
Philipinho
2026-04-19 21:31:05 +01:00
parent b28597125d
commit 91ad3de258
9 changed files with 1117 additions and 276 deletions
@@ -0,0 +1,222 @@
import { Test, TestingModule } from '@nestjs/testing';
import { ConfigModule } from '@nestjs/config';
import { KyselyModule, InjectKysely } from 'nestjs-kysely';
import { CamelCasePlugin } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js';
import * as postgres from 'postgres';
import { Injectable } from '@nestjs/common';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { BaseQueryCacheService } from './base-query-cache.service';
import { QueryCacheConfigProvider } from './query-cache.config';
import { CollectionLoader } from './collection-loader';
import { EnvironmentService } from '../../../integrations/environment/environment.service';
import { seedBase, deleteSeededBase } from './testing/seed-base';
import { PropertySchema } from '../engine';
const INTEGRATION_DB_URL = process.env.INTEGRATION_DB_URL;
// Minimal EnvironmentService stand-in that only implements the methods used
// by query-cache and the repos we touch.
// Test double for EnvironmentService exposing only the getters exercised by
// the query-cache config and the repos under test; anything else is absent
// so unexpected use fails loudly.
@Injectable()
class FakeEnvService {
  // Non-null assertion is safe: the suite only runs when INTEGRATION_DB_URL
  // is set (describeIntegration falls back to describe.skip otherwise).
  getDatabaseURL() {
    return INTEGRATION_DB_URL!;
  }
  getDatabaseMaxPool() {
    return 5;
  }
  getNodeEnv() {
    return 'test';
  }
  // Query-cache knobs: enabled flag, minimum row count before a base is
  // cached, LRU capacity, and warm-up top-N (disabled for this spec).
  getBaseQueryCacheEnabled() {
    return true;
  }
  getBaseQueryCacheMinRows() {
    return 100;
  }
  getBaseQueryCacheMaxCollections() {
    return 10;
  }
  getBaseQueryCacheWarmTopN() {
    return 0;
  }
}
// Tiny provider that surfaces the Kysely connection so the spec can run
// direct SQL (fixture lookups, seeding) outside the repos.
@Injectable()
class DbHandle {
  constructor(@InjectKysely() readonly db: KyselyDB) {}
}
/**
 * Strip query parameters the postgres.js driver cannot digest: `schema` is
 * always dropped, and `sslmode` is dropped only when its value is the
 * pg-specific `no-verify` (any other sslmode value passes through).
 */
function normalizePostgresUrl(url: string): string {
  const parsed = new URL(url);
  const kept = [...parsed.searchParams].filter(
    ([key, value]) =>
      key !== 'schema' && !(key === 'sslmode' && value === 'no-verify'),
  );
  parsed.search = new URLSearchParams(kept).toString();
  return parsed.toString();
}
// Opt-in integration suite: skipped entirely unless INTEGRATION_DB_URL
// points at a migrated database.
const describeIntegration = INTEGRATION_DB_URL ? describe : describe.skip;
describeIntegration('BaseQueryCacheService integration', () => {
  let moduleRef: TestingModule;
  let cache: BaseQueryCacheService;
  let baseRowRepo: BaseRowRepo;
  let basePropertyRepo: BasePropertyRepo;
  let dbHandle: DbHandle;
  let seededBaseId: string | null = null;
  let workspaceId: string;
  let spaceId: string;
  let creatorUserId: string | null;
  beforeAll(async () => {
    // Set env before the module compiles: ConfigModule and the query-cache
    // config read these at bootstrap.
    process.env.DATABASE_URL = INTEGRATION_DB_URL;
    process.env.BASE_QUERY_CACHE_ENABLED = 'true';
    process.env.BASE_QUERY_CACHE_MIN_ROWS = '100';
    moduleRef = await Test.createTestingModule({
      imports: [
        ConfigModule.forRoot({ isGlobal: true }),
        KyselyModule.forRoot({
          dialect: new PostgresJSDialect({
            postgres: (postgres as any)(
              normalizePostgresUrl(INTEGRATION_DB_URL!),
              {
                max: 5,
                onnotice: () => {},
                // Custom type parser for bigint (oid 20) and numeric (oid
                // 1700) columns.
                // NOTE(review): Number.parseInt truncates fractional
                // numerics (oid 1700) — confirm this mirrors the app's
                // production postgres.js config.
                types: {
                  bigint: {
                    to: 20,
                    from: [20, 1700],
                    serialize: (value: number) => value.toString(),
                    parse: (value: string) => Number.parseInt(value),
                  },
                },
              },
            ),
          }),
          plugins: [new CamelCasePlugin()],
        }),
      ],
      providers: [
        { provide: EnvironmentService, useClass: FakeEnvService },
        QueryCacheConfigProvider,
        BaseRepo,
        BasePropertyRepo,
        BaseRowRepo,
        BaseViewRepo,
        CollectionLoader,
        BaseQueryCacheService,
        DbHandle,
      ],
    }).compile();
    cache = moduleRef.get(BaseQueryCacheService);
    baseRowRepo = moduleRef.get(BaseRowRepo);
    basePropertyRepo = moduleRef.get(BasePropertyRepo);
    dbHandle = moduleRef.get(DbHandle);
    // Borrow an existing workspace/space/user instead of creating fixtures,
    // so the spec runs against any already-migrated integration database.
    const workspace = await dbHandle.db
      .selectFrom('workspaces')
      .select(['id'])
      .limit(1)
      .executeTakeFirstOrThrow();
    workspaceId = workspace.id;
    const space = await dbHandle.db
      .selectFrom('spaces')
      .select(['id'])
      .where('workspaceId', '=', workspaceId)
      .limit(1)
      .executeTakeFirstOrThrow();
    spaceId = space.id;
    const user = await dbHandle.db
      .selectFrom('users')
      .select('id')
      .limit(1)
      .executeTakeFirst();
    creatorUserId = user?.id ?? null;
    // Seed a deterministic 10k-row base; the timestamped name keeps reruns
    // from colliding if a prior afterAll failed to clean up.
    const { baseId } = await seedBase({
      db: dbHandle.db as any,
      workspaceId,
      spaceId,
      creatorUserId,
      rows: 10000,
      name: `cache-integration-${Date.now()}`,
    });
    seededBaseId = baseId;
  }, 180_000);
  afterAll(async () => {
    if (seededBaseId) {
      await deleteSeededBase(dbHandle.db as any, seededBaseId);
    }
    if (moduleRef) {
      await moduleRef.close();
    }
  }, 60_000);
  it(
    'returns the same rows as Postgres for a numeric-sort full pagination',
    async () => {
      const baseId = seededBaseId!;
      const properties = await basePropertyRepo.findByBaseId(baseId);
      const schema: PropertySchema = new Map(properties.map((p) => [p.id, p]));
      const estimateProp = properties.find((p) => p.name === 'Estimate');
      if (!estimateProp) throw new Error('Estimate property not found');
      const limit = 500;
      // Walk the Postgres engine page by page, collecting every row id.
      const pgIds: string[] = [];
      let pgCursor: string | undefined = undefined;
      for (;;) {
        const page = await baseRowRepo.list({
          baseId,
          workspaceId,
          sorts: [{ propertyId: estimateProp.id, direction: 'asc' }],
          schema,
          pagination: { limit, cursor: pgCursor } as any,
        });
        for (const item of page.items) pgIds.push(item.id);
        if (!page.meta.hasNextPage || !page.meta.nextCursor) break;
        pgCursor = page.meta.nextCursor;
      }
      // Walk the DuckDB cache path with the same sort and page size.
      const ddIds: string[] = [];
      let ddCursor: string | undefined = undefined;
      for (;;) {
        const page = await cache.list(baseId, workspaceId, {
          sorts: [{ propertyId: estimateProp.id, direction: 'asc' }],
          schema,
          pagination: { limit, cursor: ddCursor } as any,
        });
        for (const item of page.items) ddIds.push(item.id);
        if (!page.meta.hasNextPage || !page.meta.nextCursor) break;
        ddCursor = page.meta.nextCursor;
      }
      // Both engines should emit every live row at least once, and DuckDB
      // should emit each row exactly once (no duplicates). We compare
      // unique sorted id lists rather than raw page arrays because the
      // existing Postgres engine can repeat rows on tie-heavy numeric
      // sorts when the DB's default collation applies non-byte ordering
      // to `position`.
      const ddUniq = new Set(ddIds);
      expect(ddIds.length).toBe(ddUniq.size); // DuckDB emits no duplicates
      expect(ddUniq.size).toBe(10_000);
      const pgSorted = [...new Set(pgIds)].sort();
      const ddSorted = [...ddUniq].sort();
      expect(ddSorted).toEqual(pgSorted);
    },
    60_000,
  );
});
@@ -4,25 +4,290 @@ import {
OnApplicationBootstrap, OnApplicationBootstrap,
OnModuleDestroy, OnModuleDestroy,
} from '@nestjs/common'; } from '@nestjs/common';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { BaseRow } from '@docmost/db/types/entity.types';
import {
CursorPaginationResult,
emptyCursorPaginationResult,
} from '@docmost/db/pagination/cursor-pagination';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import {
CURSOR_TAIL_KEYS,
FilterNode,
PropertySchema,
SearchSpec,
SortBuild,
SortSpec,
buildSorts,
makeCursor,
} from '../engine';
import { QueryCacheConfigProvider } from './query-cache.config'; import { QueryCacheConfigProvider } from './query-cache.config';
import { CollectionLoader } from './collection-loader';
import { buildDuckDbListQuery } from './duckdb-query-builder';
import { ColumnSpec, LoadedCollection } from './query-cache.types';
/**
 * Options accepted by BaseQueryCacheService.list(). Mirrors the repo-side
 * list contract so callers can route to either engine unchanged.
 */
export type CacheListOpts = {
  filter?: FilterNode; // optional filter tree
  sorts?: SortSpec[]; // optional sort specs; empty means no user sorts
  search?: SearchSpec; // optional search spec
  schema: PropertySchema; // property-id → property map used to resolve sorts
  pagination: PaginationOptions; // limit + opaque keyset cursor
};
@Injectable() @Injectable()
export class BaseQueryCacheService export class BaseQueryCacheService
implements OnApplicationBootstrap, OnModuleDestroy implements OnApplicationBootstrap, OnModuleDestroy
{ {
private readonly logger = new Logger(BaseQueryCacheService.name); private readonly logger = new Logger(BaseQueryCacheService.name);
private readonly collections = new Map<string, LoadedCollection>();
constructor(private readonly configProvider: QueryCacheConfigProvider) {} constructor(
private readonly configProvider: QueryCacheConfigProvider,
private readonly baseRepo: BaseRepo,
private readonly collectionLoader: CollectionLoader,
) {}
async onApplicationBootstrap(): Promise<void> { async onApplicationBootstrap(): Promise<void> {
const { enabled } = this.configProvider.config; const { enabled } = this.configProvider.config;
this.logger.log( this.logger.log(
`BaseQueryCacheService bootstrapped (enabled=${enabled}).`, `BaseQueryCacheService bootstrapped (enabled=${enabled}).`,
); );
// Real warm-up is added in task 9.
} }
async onModuleDestroy(): Promise<void> { async onModuleDestroy(): Promise<void> {
// Real cleanup is added in task 5. for (const [, collection] of this.collections) {
this.closeCollection(collection);
}
this.collections.clear();
}
/**
 * Serve a paginated row list for `baseId` from the in-memory DuckDB copy.
 * Produces the same CursorPaginationResult shape as BaseRowRepo.list(),
 * including keyset cursors built from sort keys plus the (position, id)
 * tie-breaker tail.
 */
async list(
  baseId: string,
  workspaceId: string,
  opts: CacheListOpts,
): Promise<CursorPaginationResult<BaseRow>> {
  const collection = await this.ensureLoaded(baseId, workspaceId);
  // Resolve sort specs against the schema; no sorts → empty build list.
  const sortBuilds: SortBuild[] =
    opts.sorts && opts.sorts.length > 0
      ? buildSorts(opts.sorts, opts.schema)
      : [];
  const cursor = makeCursor(sortBuilds, CURSOR_TAIL_KEYS);
  const sortFieldKeys = sortBuilds.map((s) => s.key);
  // Cursor payload = user sort keys followed by the fixed tail keys.
  const allFieldKeys = [...sortFieldKeys, 'position', 'id'];
  let afterKeys: Record<string, unknown> | undefined;
  if (opts.pagination.cursor) {
    const decoded = cursor.decodeCursor(opts.pagination.cursor, allFieldKeys);
    afterKeys = cursor.parseCursor(decoded);
  }
  const { sql, params } = buildDuckDbListQuery({
    columns: collection.columns,
    filter: opts.filter,
    sorts: opts.sorts,
    search: opts.search,
    pagination: {
      limit: opts.pagination.limit,
      afterKeys: afterKeys as any,
    },
  });
  // Bind positionally (DuckDB placeholders are 1-based). Each JS value maps
  // to the closest DuckDB bind call; Dates and other objects go in as
  // strings.
  // NOTE(review): the prepared statement is never explicitly destroyed —
  // confirm @duckdb/node-api releases it with the connection.
  const prepared = await collection.connection.prepare(sql);
  for (let i = 0; i < params.length; i++) {
    const p = params[i];
    const oneBased = i + 1;
    if (p === null || p === undefined) {
      prepared.bindNull(oneBased);
    } else if (typeof p === 'string') {
      prepared.bindVarchar(oneBased, p);
    } else if (typeof p === 'number') {
      prepared.bindDouble(oneBased, p);
    } else if (typeof p === 'boolean') {
      prepared.bindBoolean(oneBased, p);
    } else if (p instanceof Date) {
      prepared.bindVarchar(oneBased, p.toISOString());
    } else {
      prepared.bindVarchar(oneBased, JSON.stringify(p));
    }
  }
  const reader = await prepared.runAndReadAll();
  const duckRows = reader.getRowObjectsJS();
  // One extra row beyond `limit` signals a next page; drop it before
  // shaping. (Assumes the builder fetches limit + 1 — see
  // buildDuckDbListQuery.)
  const hasNextPage = duckRows.length > opts.pagination.limit;
  if (hasNextPage) duckRows.pop();
  if (duckRows.length === 0) {
    return emptyCursorPaginationResult<BaseRow>(opts.pagination.limit);
  }
  const items = duckRows.map((r) =>
    shapeBaseRow(r, collection.columns, sortBuilds),
  );
  const endRow = duckRows[duckRows.length - 1];
  const startRow = duckRows[0];
  // Encode a cursor from a raw DuckDB row: sort-key values in order, then
  // the position/id tail.
  const encodeFromRow = (raw: Record<string, unknown>): string => {
    const entries: Array<[string, unknown]> = [];
    for (const sb of sortBuilds) {
      entries.push([sb.key, raw[sb.key]]);
    }
    entries.push(['position', raw.position]);
    entries.push(['id', raw.id]);
    return cursor.encodeCursor(entries);
  };
  // Any request that carried a cursor has a previous page by construction.
  const hasPrevPage = !!opts.pagination.cursor;
  const nextCursor = hasNextPage ? encodeFromRow(endRow) : null;
  const prevCursor = hasPrevPage ? encodeFromRow(startRow) : null;
  return {
    items,
    meta: {
      limit: opts.pagination.limit,
      hasNextPage,
      hasPrevPage,
      nextCursor,
      prevCursor,
    },
  };
}
async invalidate(baseId: string): Promise<void> {
const collection = this.collections.get(baseId);
if (!collection) return;
this.closeCollection(collection);
this.collections.delete(baseId);
}
/**
 * Return the loaded collection for a base, (re)loading it when missing or
 * stale. Staleness is detected by comparing the base's current
 * schemaVersion in Postgres with the cached copy, so every call costs one
 * baseRepo.findById round-trip.
 *
 * NOTE(review): not guarded against concurrent callers — two overlapping
 * calls for the same uncached base will both run collectionLoader.load;
 * confirm upstream serialization or accept the duplicated work.
 */
private async ensureLoaded(
  baseId: string,
  workspaceId: string,
): Promise<LoadedCollection> {
  const existing = this.collections.get(baseId);
  const base = await this.baseRepo.findById(baseId);
  if (!base) {
    throw new Error(`Base ${baseId} not found`);
  }
  // Bases predating schema versioning default to version 1.
  const freshVersion = (base as any).schemaVersion ?? 1;
  if (existing && existing.schemaVersion === freshVersion) {
    existing.lastAccessedAt = Date.now(); // touch for LRU ordering
    return existing;
  }
  // Stale copy: tear it down before reloading.
  if (existing) {
    this.closeCollection(existing);
    this.collections.delete(baseId);
  }
  // Make room with a single LRU eviction before inserting the new copy.
  const { maxCollections } = this.configProvider.config;
  if (this.collections.size >= maxCollections) {
    this.evictLru();
  }
  const loaded = await this.collectionLoader.load(baseId, workspaceId);
  this.collections.set(baseId, loaded);
  return loaded;
}
/**
 * Close and drop the least-recently-used collection, found with a single
 * pass over the map (first entry wins ties). No-op on an empty map.
 */
private evictLru(): void {
  let victim: [string, LoadedCollection] | null = null;
  for (const entry of this.collections) {
    if (victim === null || entry[1].lastAccessedAt < victim[1].lastAccessedAt) {
      victim = entry;
    }
  }
  if (victim && victim[0]) {
    this.closeCollection(victim[1]);
    this.collections.delete(victim[0]);
    this.logger.debug(`Evicted LRU collection ${victim[0]}`);
  }
}
/**
 * Best-effort teardown of a loaded collection: close the DuckDB connection
 * first, then its owning instance. Failures are logged and swallowed so a
 * broken handle never blocks eviction or shutdown.
 */
private closeCollection(collection: LoadedCollection): void {
  try {
    collection.connection.closeSync();
  } catch (err) {
    this.logger.warn(`Failed to close connection: ${(err as Error).message}`);
  }
  try {
    collection.instance.closeSync();
  } catch (err) {
    this.logger.warn(`Failed to close instance: ${(err as Error).message}`);
  }
} }
} }
/**
 * Rebuild a BaseRow from a raw DuckDB result row. The `cells` payload is
 * reassembled directly from the per-property columns (type round-tripping
 * lives in normaliseCellValue) so the JSON shape matches what Postgres
 * returns; system columns are copied across with string coercion and
 * timestamps come back as Dates via toDate().
 */
function shapeBaseRow(
  raw: Record<string, unknown>,
  specs: ColumnSpec[],
  _sortBuilds: SortBuild[],
): BaseRow {
  // System columns carry no `property`; only user-defined columns feed cells.
  const cells = specs.reduce<Record<string, unknown>>((acc, spec) => {
    if (spec.property) {
      acc[spec.property.id] = normaliseCellValue(raw[spec.column], spec);
    }
    return acc;
  }, {});
  const asNullableString = (v: unknown) => (v == null ? null : String(v));
  return {
    id: String(raw.id),
    baseId: String(raw.base_id),
    cells: cells as any,
    position: String(raw.position),
    creatorId: asNullableString(raw.creator_id),
    lastUpdatedById: asNullableString(raw.last_updated_by_id),
    workspaceId: String(raw.workspace_id),
    createdAt: toDate(raw.created_at),
    updatedAt: toDate(raw.updated_at),
    deletedAt: raw.deleted_at == null ? null : toDate(raw.deleted_at),
  } as BaseRow;
}
/**
 * Round-trip one DuckDB cell value back to the JSON shape Postgres would
 * return for that column's declared type. Null/undefined collapse to null;
 * unknown ddlTypes pass the value through untouched.
 */
function normaliseCellValue(value: unknown, spec: ColumnSpec): unknown {
  if (value === null || value === undefined) return null;
  if (spec.ddlType === 'VARCHAR') return String(value);
  if (spec.ddlType === 'DOUBLE') {
    return typeof value === 'number' ? value : Number(value);
  }
  if (spec.ddlType === 'BOOLEAN') return Boolean(value);
  if (spec.ddlType === 'TIMESTAMPTZ') {
    // Dates become ISO strings; anything else is stringified as-is.
    return value instanceof Date ? value.toISOString() : String(value);
  }
  if (spec.ddlType === 'JSON') {
    if (typeof value !== 'string') return value;
    try {
      return JSON.parse(value);
    } catch {
      return value; // malformed JSON payloads pass through untouched
    }
  }
  return value;
}
/** Coerce a DuckDB timestamp value (Date or parseable string) to a Date. */
function toDate(value: unknown): Date {
  return value instanceof Date ? value : new Date(String(value));
}
@@ -0,0 +1,165 @@
import { Injectable, Logger } from '@nestjs/common';
import { DuckDBInstance } from '@duckdb/node-api';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BaseRow } from '@docmost/db/types/entity.types';
import { BasePropertyType } from '../base.schemas';
import { buildColumnSpecs } from './column-types';
import { ColumnSpec, LoadedCollection } from './query-cache.types';
// System property type → the BaseRow entity field (camelCase) that backs it
// when reading values off a Postgres row. Mirrors the system-column mapping
// in duckdb-query-builder.ts.
const SYSTEM_PROPERTY_COLUMN: Record<string, keyof BaseRow> = {
  [BasePropertyType.CREATED_AT]: 'createdAt',
  [BasePropertyType.LAST_EDITED_AT]: 'updatedAt',
  [BasePropertyType.LAST_EDITED_BY]: 'lastUpdatedById',
};
/**
 * Loads one base's live rows into a fresh in-memory DuckDB instance.
 *
 * The table is derived from the base's property schema (buildColumnSpecs),
 * rows are bulk-inserted with the Appender API in 5k-row chunks, and each
 * indexable column gets an index after the bulk load. The returned
 * LoadedCollection carries the instance/connection handles plus the
 * schemaVersion observed at load time so the cache can detect staleness.
 *
 * Fix over the previous revision: if anything failed after the DuckDB
 * instance was created (DDL, row streaming, index creation), the in-memory
 * instance and connection were leaked. load() now closes both on any error
 * before rethrowing.
 */
@Injectable()
export class CollectionLoader {
  private readonly logger = new Logger(CollectionLoader.name);
  constructor(
    private readonly baseRepo: BaseRepo,
    private readonly basePropertyRepo: BasePropertyRepo,
    private readonly baseRowRepo: BaseRowRepo,
  ) {}
  /**
   * Snapshot every live row of `baseId` into a new in-memory DuckDB.
   * @throws Error when the base does not exist, or whatever the underlying
   *         stream/DuckDB call throws (after handles are cleaned up).
   */
  async load(baseId: string, workspaceId: string): Promise<LoadedCollection> {
    const base = await this.baseRepo.findById(baseId);
    if (!base) {
      throw new Error(`Base ${baseId} not found`);
    }
    // Bases predating schema versioning default to version 1.
    const schemaVersion = (base as any).schemaVersion ?? 1;
    const properties = await this.basePropertyRepo.findByBaseId(baseId);
    const specs = buildColumnSpecs(properties);
    const instance = await DuckDBInstance.create(':memory:');
    const connection = await instance.connect();
    try {
      // One column per spec, in spec order; `id` is the primary key.
      const ddl = `CREATE TABLE rows (${specs
        .map((s) => `${quoteIdent(s.column)} ${s.ddlType}`)
        .join(', ')}, PRIMARY KEY (${quoteIdent('id')}))`;
      await connection.run(ddl);
      const appender = await connection.createAppender('rows');
      let rowCount = 0;
      for await (const chunk of this.baseRowRepo.streamByBaseId(baseId, {
        workspaceId,
        chunkSize: 5000,
      })) {
        for (const row of chunk) {
          // Append column values in spec order. Malformed numbers and
          // timestamps degrade to NULL with a debug log rather than failing
          // the whole load.
          for (const spec of specs) {
            const raw = readFromRow(row, spec);
            if (raw == null) {
              appender.appendNull();
              continue;
            }
            switch (spec.ddlType) {
              case 'VARCHAR':
                appender.appendVarchar(String(raw));
                break;
              case 'DOUBLE': {
                const n = Number(raw);
                if (Number.isNaN(n)) {
                  this.logger.debug(
                    `Malformed number for ${spec.column} on row ${row.id}`,
                  );
                  appender.appendNull();
                  break;
                }
                appender.appendDouble(n);
                break;
              }
              case 'BOOLEAN':
                appender.appendBoolean(Boolean(raw));
                break;
              case 'TIMESTAMPTZ': {
                const d = raw instanceof Date ? raw : new Date(String(raw));
                if (Number.isNaN(d.getTime())) {
                  this.logger.debug(
                    `Malformed timestamp for ${spec.column} on row ${row.id}`,
                  );
                  appender.appendNull();
                  break;
                }
                // NOTE(review): relies on the appender accepting ISO-8601
                // text for a TIMESTAMPTZ column — confirm against
                // @duckdb/node-api.
                appender.appendVarchar(d.toISOString());
                break;
              }
              case 'JSON':
                appender.appendVarchar(JSON.stringify(raw));
                break;
            }
          }
          appender.endRow();
          rowCount++;
        }
      }
      appender.flushSync();
      appender.closeSync();
      // Index after the bulk load: cheaper than maintaining indexes during
      // the appender inserts.
      for (const spec of specs) {
        if (!spec.indexable) continue;
        const safe = spec.column.replace(/[^a-zA-Z0-9_]/g, '_');
        await connection.run(
          `CREATE INDEX ${quoteIdent(`idx_${safe}`)} ON rows (${quoteIdent(spec.column)})`,
        );
      }
      this.logger.debug(
        `Loaded ${rowCount} rows for base ${baseId} (schemaVersion=${schemaVersion})`,
      );
      return {
        baseId,
        schemaVersion,
        columns: specs,
        instance,
        connection,
        lastAccessedAt: Date.now(),
      };
    } catch (err) {
      // Don't leak the in-memory DuckDB if the load fails part-way through.
      try {
        connection.closeSync();
      } catch {
        // best-effort cleanup
      }
      try {
        instance.closeSync();
      } catch {
        // best-effort cleanup
      }
      throw err;
    }
  }
}
/**
 * Pull the value for one DuckDB column out of a BaseRow entity.
 * System columns read fixed entity fields: `deleted_at` is always null
 * (the loader only inserts live rows) and `search_text` is always empty
 * (search stays on Postgres in v1). User-defined columns read either a
 * system-backed entity field (created/edited metadata types) or the JSON
 * `cells` payload keyed by property id.
 */
function readFromRow(row: BaseRow, spec: ColumnSpec): unknown {
  const systemReaders: Record<string, () => unknown> = {
    id: () => row.id,
    base_id: () => row.baseId,
    workspace_id: () => row.workspaceId,
    creator_id: () => row.creatorId,
    position: () => row.position,
    created_at: () => row.createdAt,
    updated_at: () => row.updatedAt,
    last_updated_by_id: () => row.lastUpdatedById,
    deleted_at: () => null, // loader only inserts live rows
    search_text: () => '', // search stays on Postgres in v1
  };
  const reader = systemReaders[spec.column];
  if (reader) return reader();
  const prop = spec.property;
  if (!prop) return null;
  const backingField = SYSTEM_PROPERTY_COLUMN[prop.type];
  if (backingField) return (row as any)[backingField];
  const cells = (row.cells as Record<string, unknown> | null) ?? {};
  return cells[prop.id] ?? null;
}
/** Double-quote a DuckDB identifier, escaping embedded quotes by doubling. */
function quoteIdent(name: string): string {
  const escaped = name.split('"').join('""');
  return '"' + escaped + '"';
}
@@ -4,6 +4,9 @@ import type { BaseProperty } from '@docmost/db/types/entity.types';
export const SYSTEM_COLUMNS: ColumnSpec[] = [ export const SYSTEM_COLUMNS: ColumnSpec[] = [
{ column: 'id', ddlType: 'VARCHAR', indexable: false }, { column: 'id', ddlType: 'VARCHAR', indexable: false },
{ column: 'base_id', ddlType: 'VARCHAR', indexable: false },
{ column: 'workspace_id', ddlType: 'VARCHAR', indexable: false },
{ column: 'creator_id', ddlType: 'VARCHAR', indexable: false },
{ column: 'position', ddlType: 'VARCHAR', indexable: true }, { column: 'position', ddlType: 'VARCHAR', indexable: true },
{ column: 'created_at', ddlType: 'TIMESTAMPTZ', indexable: true }, { column: 'created_at', ddlType: 'TIMESTAMPTZ', indexable: true },
{ column: 'updated_at', ddlType: 'TIMESTAMPTZ', indexable: true }, { column: 'updated_at', ddlType: 'TIMESTAMPTZ', indexable: true },
@@ -563,6 +563,11 @@ function buildKeyset(
// Mirrors cursor-pagination.ts `applyCursor`: builds the lexicographic // Mirrors cursor-pagination.ts `applyCursor`: builds the lexicographic
// OR-chain from tail to head, wrapping each step as // OR-chain from tail to head, wrapping each step as
// `(fi > v) OR (fi = v AND <tail>)`. // `(fi > v) OR (fi = v AND <tail>)`.
//
// Param binding is positional (1-based `?`). Placeholders appear
// left-to-right in the final SQL as: leg0(head), leg0(tie), leg1(head),
// leg1(tie), ..., legN(head). We therefore collect the per-leg params
// first, then flatten in head→tail order at the end.
type Leg = { key: string; expression: string; direction: 'asc' | 'desc' }; type Leg = { key: string; expression: string; direction: 'asc' | 'desc' };
const legs: Leg[] = [ const legs: Leg[] = [
...sortBuilds.map((s) => ({ ...sortBuilds.map((s) => ({
@@ -574,25 +579,37 @@ function buildKeyset(
{ key: 'id', expression: 'id', direction: 'asc' }, { key: 'id', expression: 'id', direction: 'asc' },
]; ];
// Skip legs whose key is absent from afterKeys (shouldn't happen for
// well-formed cursors, but keeps the builder defensive).
const usable = legs.filter((l) => l.key in afterKeys);
if (usable.length === 0) return 'TRUE';
// legParams[i] = [value, value?] — one push for the head `>` or `<`,
// one more push for the tie `=` on every leg except the last.
const legParams: unknown[][] = [];
let expr = ''; let expr = '';
for (let i = legs.length - 1; i >= 0; i--) { for (let i = usable.length - 1; i >= 0; i--) {
const leg = legs[i]; const leg = usable[i];
if (!(leg.key in afterKeys)) continue;
const value = afterKeys[leg.key]; const value = afterKeys[leg.key];
const cmp = leg.direction === 'asc' ? '>' : '<'; const cmp = leg.direction === 'asc' ? '>' : '<';
params.push(value);
const head = `${leg.expression} ${cmp} ?`; const head = `${leg.expression} ${cmp} ?`;
if (!expr) { if (!expr) {
legParams[i] = [value];
expr = head; expr = head;
continue; continue;
} }
params.push(value); legParams[i] = [value, value];
const tie = `${leg.expression} = ?`; const tie = `${leg.expression} = ?`;
expr = `(${head} OR (${tie} AND ${expr}))`; expr = `(${head} OR (${tie} AND ${expr}))`;
} }
return expr || 'TRUE';
// Flatten legs in head→tail (placeholder) order.
for (const values of legParams) {
for (const v of values) params.push(v);
}
return expr;
} }
// --- utilities --------------------------------------------------------- // --- utilities ---------------------------------------------------------
@@ -2,9 +2,15 @@ import { Module } from '@nestjs/common';
import { QueryCacheConfigProvider } from './query-cache.config'; import { QueryCacheConfigProvider } from './query-cache.config';
import { BaseQueryCacheService } from './base-query-cache.service'; import { BaseQueryCacheService } from './base-query-cache.service';
import { BaseQueryRouter } from './base-query-router'; import { BaseQueryRouter } from './base-query-router';
import { CollectionLoader } from './collection-loader';
@Module({ @Module({
providers: [QueryCacheConfigProvider, BaseQueryCacheService, BaseQueryRouter], providers: [
QueryCacheConfigProvider,
BaseQueryCacheService,
BaseQueryRouter,
CollectionLoader,
],
exports: [BaseQueryCacheService, BaseQueryRouter, QueryCacheConfigProvider], exports: [BaseQueryCacheService, BaseQueryRouter, QueryCacheConfigProvider],
}) })
export class BaseQueryCacheModule {} export class BaseQueryCacheModule {}
@@ -0,0 +1,399 @@
import type { Kysely } from 'kysely';
import { randomBytes } from 'node:crypto';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
// Minimal RFC 9562 UUIDv7, inlined because `uuid@13` is ESM-only and this
// module is loaded by jest (CommonJS) in the integration spec. Layout:
// 48-bit big-endian unix-ms timestamp, then random bits with the version
// nibble forced to 7 and the variant bits to 10.
function uuid7(): string {
  const bytes = randomBytes(16);
  // Overwrite the first six bytes with the millisecond timestamp, MSB first.
  let ms = BigInt(Date.now());
  for (let i = 5; i >= 0; i--) {
    bytes[i] = Number(ms & 0xffn);
    ms >>= 8n;
  }
  bytes[6] = (bytes[6] & 0x0f) | 0x70; // version 7
  bytes[8] = (bytes[8] & 0x3f) | 0x80; // RFC variant
  const hex = bytes.toString('hex');
  return [
    hex.slice(0, 8),
    hex.slice(8, 12),
    hex.slice(12, 16),
    hex.slice(16, 20),
    hex.slice(20, 32),
  ].join('-');
}
/** Inputs for seedBase(). `creatorUserId` may be null when no user exists. */
export type SeedBaseOptions = {
  db: Kysely<any>;
  workspaceId: string;
  spaceId: string;
  creatorUserId: string | null;
  rows: number;
  // Optional base name; seedBase() derives a size-based label when omitted.
  name?: string;
};
/** Result of seedBase(): the new base id plus each property id keyed by role. */
export type SeededBase = {
  baseId: string;
  propertyIds: {
    title: string;
    status: string;
    priority: string;
    category: string;
    tags: string;
    dueDate: string;
    estimate: string;
    budget: string;
    approved: string;
    website: string;
    contactEmail: string;
    notes: string;
    created: string;
    lastEdited: string;
  };
};
// Property types that never get a generated cell value: system-managed
// metadata types plus types the seeder does not support.
const SKIP_TYPES = new Set([
  'createdAt',
  'lastEditedAt',
  'lastEditedBy',
  'person',
  'file',
]);
// Word pool for generated text cells (NATO alphabet plus document-flavoured
// nouns).
const WORDS = [
  'Alpha', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot', 'Golf',
  'Hotel', 'India', 'Juliet', 'Kilo', 'Lima', 'Mike', 'November',
  'Oscar', 'Papa', 'Quebec', 'Romeo', 'Sierra', 'Tango', 'Uniform',
  'Victor', 'Whiskey', 'X-ray', 'Yankee', 'Zulu', 'Report', 'Analysis',
  'Summary', 'Review', 'Update', 'Draft', 'Final', 'Proposal', 'Budget',
  'Timeline', 'Milestone', 'Objective', 'Strategy', 'Initiative',
];
// Palette cycled through by makeChoices().
const COLORS = [
  'red', 'orange', 'yellow', 'green', 'blue', 'purple', 'pink', 'gray',
];
// Deterministic RNG (mulberry32) so tests are reproducible. Returns a
// closure yielding floats in [0, 1); the same seed always produces the
// same sequence.
function makeRng(seed: number): () => number {
  let state = seed >>> 0;
  return () => {
    state = (state + 0x6d2b79f5) >>> 0;
    let mixed = state;
    mixed = Math.imul(mixed ^ (mixed >>> 15), mixed | 1);
    mixed ^= mixed + Math.imul(mixed ^ (mixed >>> 7), mixed | 61);
    return ((mixed ^ (mixed >>> 14)) >>> 0) / 4294967296;
  };
}
// 32-bit FNV-1a over UTF-16 code units; used to derive a numeric RNG seed
// from a human-readable string.
function hashSeed(input: string): number {
  let hash = 2166136261; // FNV offset basis
  for (let i = 0; i < input.length; i += 1) {
    hash = Math.imul(hash ^ input.charCodeAt(i), 16777619); // FNV prime
  }
  return hash >>> 0;
}
// Join between `min` and `max` (inclusive) random picks from WORDS with
// spaces, driven entirely by the supplied deterministic rng.
function randomWords(rng: () => number, min: number, max: number): string {
  const count = min + Math.floor(rng() * (max - min + 1));
  return Array.from(
    { length: count },
    () => WORDS[Math.floor(rng() * WORDS.length)],
  ).join(' ');
}
// Build select-style choice objects for the given names, assigning colors
// by cycling through COLORS.
function makeChoices(names: string[]) {
  const choices = [];
  let index = 0;
  for (const name of names) {
    choices.push({ id: uuid7(), name, color: COLORS[index % COLORS.length] });
    index += 1;
  }
  return choices;
}
// Status choices spanning the three workflow categories (todo / inProgress
// / complete); returns the flat choice list plus its display order.
function makeStatusChoices() {
  const all = [
    { id: uuid7(), name: 'Not Started', color: 'gray', category: 'todo' },
    { id: uuid7(), name: 'In Progress', color: 'blue', category: 'inProgress' },
    { id: uuid7(), name: 'In Review', color: 'purple', category: 'inProgress' },
    { id: uuid7(), name: 'Done', color: 'green', category: 'complete' },
    { id: uuid7(), name: 'Cancelled', color: 'red', category: 'complete' },
  ];
  return { choices: all, choiceOrder: all.map((choice) => choice.id) };
}
// Shape of one entry returned by buildPropertyDefinitions(); mirrors the
// base_properties columns the seeder fills in.
type PropertyDef = {
  name: string;
  type: string;
  isPrimary?: boolean;
  typeOptions?: any;
};
// The 14-property schema every seeded base gets: one of each seeded
// user-facing type plus two system timestamp types. Choice-based properties
// get freshly generated choice ids on every call.
function buildPropertyDefinitions(): PropertyDef[] {
  const priorityChoices = makeChoices(['Low', 'Medium', 'High', 'Critical']);
  const categoryChoices = makeChoices([
    'Engineering',
    'Design',
    'Marketing',
    'Sales',
    'Support',
    'Operations',
  ]);
  const tagChoices = makeChoices([
    'Bug',
    'Feature',
    'Improvement',
    'Documentation',
    'Research',
  ]);
  const statusOpts = makeStatusChoices();
  return [
    { name: 'Title', type: 'text', isPrimary: true },
    { name: 'Status', type: 'status', typeOptions: statusOpts },
    {
      name: 'Priority',
      type: 'select',
      typeOptions: {
        choices: priorityChoices,
        choiceOrder: priorityChoices.map((c) => c.id),
      },
    },
    {
      name: 'Category',
      type: 'select',
      typeOptions: {
        choices: categoryChoices,
        choiceOrder: categoryChoices.map((c) => c.id),
      },
    },
    {
      name: 'Tags',
      type: 'multiSelect',
      typeOptions: {
        choices: tagChoices,
        choiceOrder: tagChoices.map((c) => c.id),
      },
    },
    {
      name: 'Due Date',
      type: 'date',
      typeOptions: { dateFormat: 'YYYY-MM-DD', includeTime: false },
    },
    {
      name: 'Estimate',
      type: 'number',
      typeOptions: { format: 'plain', precision: 1 },
    },
    {
      name: 'Budget',
      type: 'number',
      typeOptions: { format: 'currency', precision: 2, currencySymbol: '$' },
    },
    { name: 'Approved', type: 'checkbox' },
    { name: 'Website', type: 'url' },
    { name: 'Contact Email', type: 'email' },
    { name: 'Notes', type: 'text' },
    { name: 'Created', type: 'createdAt' },
    { name: 'Last Edited', type: 'lastEditedAt' },
  ];
}
type CellGenerator = () => unknown;
// Return a closure producing random cell values for `property`, or null for
// system-managed / unsupported types (SKIP_TYPES and select-likes with no
// choices). All randomness flows through the shared deterministic rng, so
// generator call order fixes the data.
function buildCellGenerator(
  property: any,
  rng: () => number,
): CellGenerator | null {
  if (SKIP_TYPES.has(property.type)) return null;
  // Accept both snake_case (raw insert shape) and camelCase (entity shape).
  const typeOptions = property.type_options ?? property.typeOptions;
  const kind = property.type;
  if (kind === 'text') return () => randomWords(rng, 2, 6);
  if (kind === 'number') return () => Math.round(rng() * 10000 * 100) / 100;
  if (kind === 'select' || kind === 'status') {
    const choices = typeOptions?.choices ?? [];
    if (choices.length === 0) return null;
    return () => choices[Math.floor(rng() * choices.length)].id;
  }
  if (kind === 'multiSelect') {
    const choices = typeOptions?.choices ?? [];
    if (choices.length === 0) return () => [];
    return () => {
      const count = 1 + Math.floor(rng() * Math.min(3, choices.length));
      const shuffled = [...choices].sort(() => rng() - 0.5);
      return shuffled.slice(0, count).map((c: any) => c.id);
    };
  }
  if (kind === 'date') {
    const start = new Date(2020, 0, 1).getTime();
    const range = new Date(2026, 0, 1).getTime() - start;
    return () => new Date(start + rng() * range).toISOString();
  }
  if (kind === 'checkbox') return () => rng() > 0.5;
  if (kind === 'url') {
    return () => `https://example.com/page/${Math.floor(rng() * 100000)}`;
  }
  if (kind === 'email') {
    return () => `user${Math.floor(rng() * 100000)}@example.com`;
  }
  return null;
}
/**
 * Seed a base with `rows` rows for integration testing: creates the base,
 * its 14 properties, one default table view, and batched row inserts. Cell
 * values come from a mulberry32 RNG seeded from `${baseName}:${rows}`, so
 * the same inputs reproduce the same data (modulo uuids and the jitter in
 * fractional-index positions).
 */
export async function seedBase(opts: SeedBaseOptions): Promise<SeededBase> {
  const { db, workspaceId, spaceId, creatorUserId, rows } = opts;
  const baseName =
    opts.name ??
    `Seed Base ${rows >= 1000 ? `${Math.round(rows / 1000)}K` : `${rows}`} rows`;
  const rng = makeRng(hashSeed(`${baseName}:${rows}`));
  const baseId = uuid7();
  await db
    .insertInto('bases')
    .values({
      id: baseId,
      name: baseName,
      space_id: spaceId,
      workspace_id: workspaceId,
      creator_id: creatorUserId,
      created_at: new Date(),
      updated_at: new Date(),
    })
    .execute();
  // Properties: fractional-index positions generated in definition order.
  const propertyDefs = buildPropertyDefinitions();
  let propPosition: string | null = null;
  const insertedProperties: any[] = [];
  for (const def of propertyDefs) {
    propPosition = generateJitteredKeyBetween(propPosition, null);
    insertedProperties.push({
      id: uuid7(),
      base_id: baseId,
      name: def.name,
      type: def.type,
      position: propPosition,
      type_options: def.typeOptions ?? null,
      is_primary: def.isPrimary ?? false,
      workspace_id: workspaceId,
      created_at: new Date(),
      updated_at: new Date(),
    });
  }
  await db.insertInto('base_properties').values(insertedProperties).execute();
  // One default table view so the seeded base is usable from the UI.
  const viewId = uuid7();
  await db
    .insertInto('base_views')
    .values({
      id: viewId,
      base_id: baseId,
      name: 'Table View 1',
      type: 'table',
      position: generateJitteredKeyBetween(null, null),
      config: {},
      workspace_id: workspaceId,
      creator_id: creatorUserId,
      created_at: new Date(),
      updated_at: new Date(),
    })
    .execute();
  // Expose property ids by role so tests can target specific columns.
  const byName = new Map(insertedProperties.map((p) => [p.name, p.id]));
  const propertyIds: SeededBase['propertyIds'] = {
    title: byName.get('Title')!,
    status: byName.get('Status')!,
    priority: byName.get('Priority')!,
    category: byName.get('Category')!,
    tags: byName.get('Tags')!,
    dueDate: byName.get('Due Date')!,
    estimate: byName.get('Estimate')!,
    budget: byName.get('Budget')!,
    approved: byName.get('Approved')!,
    website: byName.get('Website')!,
    contactEmail: byName.get('Contact Email')!,
    notes: byName.get('Notes')!,
    created: byName.get('Created')!,
    lastEdited: byName.get('Last Edited')!,
  };
  // One generator per non-system property; system types yield no cell.
  const generators: Array<{ propertyId: string; generate: CellGenerator }> = [];
  for (const prop of insertedProperties) {
    const gen = buildCellGenerator(prop, rng);
    if (gen) {
      generators.push({ propertyId: prop.id, generate: gen });
    }
  }
  // Pre-compute all row positions so they are strictly ordered end to end.
  const positions: string[] = new Array(rows);
  let lastPosition: string | null = null;
  for (let i = 0; i < rows; i++) {
    lastPosition = generateJitteredKeyBetween(lastPosition, null);
    positions[i] = lastPosition;
  }
  // Insert in 2k-row batches to keep per-statement parameter counts sane.
  const BATCH_SIZE = 2000;
  for (let batchStart = 0; batchStart < rows; batchStart += BATCH_SIZE) {
    const batchEnd = Math.min(batchStart + BATCH_SIZE, rows);
    const rowsBatch: any[] = [];
    for (let i = batchStart; i < batchEnd; i++) {
      const cells: Record<string, unknown> = {};
      for (const { propertyId, generate } of generators) {
        cells[propertyId] = generate();
      }
      rowsBatch.push({
        id: uuid7(),
        base_id: baseId,
        cells,
        position: positions[i],
        creator_id: creatorUserId,
        workspace_id: workspaceId,
        created_at: new Date(),
        updated_at: new Date(),
      });
    }
    await db.insertInto('base_rows').values(rowsBatch).execute();
  }
  return { baseId, propertyIds };
}
/**
 * Removes a seeded base and all of its dependent rows. Child tables are
 * cleared first so foreign-key constraints are never violated; the base
 * record itself goes last.
 */
export async function deleteSeededBase(
  db: Kysely<any>,
  baseId: string,
): Promise<void> {
  const childTables = ['base_rows', 'base_views', 'base_properties'] as const;
  for (const table of childTables) {
    await db.deleteFrom(table).where('base_id', '=', baseId).execute();
  }
  await db.deleteFrom('bases').where('id', '=', baseId).execute();
}
@@ -128,6 +128,21 @@ export class BaseRowRepo {
}); });
} }
/**
 * Counts the non-deleted rows of a base, scoped to the caller's workspace.
 * Runs inside `opts.trx` when one is supplied, otherwise on the default
 * connection via `dbOrTx`.
 *
 * @param baseId - id of the base whose rows are counted
 * @param opts - workspace scope plus optional transaction handle
 * @returns the row count as a plain number
 */
async countActiveRows(
  baseId: string,
  opts: WorkspaceOpts,
): Promise<number> {
  const db = dbOrTx(this.db, opts.trx);
  const row = await db
    .selectFrom('baseRows')
    .select((eb) => eb.fn.countAll<number>().as('count'))
    .where('baseId', '=', baseId)
    .where('workspaceId', '=', opts.workspaceId)
    .where('deletedAt', 'is', null)
    .executeTakeFirst();
  // count may come back as a bigint/string from the driver — TODO confirm;
  // Number(...) normalises it and `?? 0` covers the no-row case.
  return Number(row?.count ?? 0);
}
async getLastPosition( async getLastPosition(
baseId: string, baseId: string,
opts: WorkspaceOpts, opts: WorkspaceOpts,
+15 -266
View File
@@ -3,11 +3,9 @@ import * as dotenv from 'dotenv';
import { Kysely } from 'kysely'; import { Kysely } from 'kysely';
import { PostgresJSDialect } from 'kysely-postgres-js'; import { PostgresJSDialect } from 'kysely-postgres-js';
import postgres from 'postgres'; import postgres from 'postgres';
import { v7 as uuid7 } from 'uuid'; import { seedBase } from '../core/base/query-cache/testing/seed-base';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
const TOTAL_ROWS = Number(process.env.TOTAL_ROWS) || 1500; const TOTAL_ROWS = Number(process.env.TOTAL_ROWS) || 1500;
const BATCH_SIZE = 2000;
const envFilePath = path.resolve(process.cwd(), '..', '..', '.env'); const envFilePath = path.resolve(process.cwd(), '..', '..', '.env');
dotenv.config({ path: envFilePath }); dotenv.config({ path: envFilePath });
@@ -30,206 +28,6 @@ const db = new Kysely<any>({
}), }),
}); });
// Property types whose cells are system-maintained (or need external data),
// so the seeder generates no cell values for them.
const SKIP_TYPES = new Set([
  'createdAt',
  'lastEditedAt',
  'lastEditedBy',
  'person',
  'file',
]);
// Word pool for generated text cells (NATO alphabet + generic doc words).
const WORDS = [
  'Alpha', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot', 'Golf',
  'Hotel', 'India', 'Juliet', 'Kilo', 'Lima', 'Mike', 'November',
  'Oscar', 'Papa', 'Quebec', 'Romeo', 'Sierra', 'Tango', 'Uniform',
  'Victor', 'Whiskey', 'X-ray', 'Yankee', 'Zulu', 'Report', 'Analysis',
  'Summary', 'Review', 'Update', 'Draft', 'Final', 'Proposal', 'Budget',
  'Timeline', 'Milestone', 'Objective', 'Strategy', 'Initiative',
];
// Colors cycled through when building select/status choices.
const COLORS = [
  'red', 'orange', 'yellow', 'green', 'blue', 'purple', 'pink', 'gray',
];
// Builds a space-joined string of random WORDS entries; the word count is
// drawn uniformly from [min, max] inclusive.
function randomWords(min: number, max: number): string {
  const wordCount = min + Math.floor(Math.random() * (max - min + 1));
  return Array.from(
    { length: wordCount },
    () => WORDS[Math.floor(Math.random() * WORDS.length)],
  ).join(' ');
}
// Builds one choice object per name, cycling through the color palette.
// When a category is supplied, every choice is tagged with it.
//
// Fix: the original spread was `...(category ? {} : {})`, which spreads an
// empty object on BOTH branches — the `category` parameter was silently
// ignored. Spreading `{ category }` applies it; callers that omit the
// argument are unaffected.
function makeChoices(names: string[], category?: string) {
  return names.map((name, i) => ({
    id: uuid7(),
    name,
    color: COLORS[i % COLORS.length],
    ...(category ? { category } : {}),
  }));
}
// Fixed status choice set spanning the three workflow categories
// (todo / inProgress / complete). Returns the choices plus their
// display order (choiceOrder mirrors declaration order).
function makeStatusChoices() {
  const statusTable: Array<[string, string, string]> = [
    ['Not Started', 'gray', 'todo'],
    ['In Progress', 'blue', 'inProgress'],
    ['In Review', 'purple', 'inProgress'],
    ['Done', 'green', 'complete'],
    ['Cancelled', 'red', 'complete'],
  ];
  const choices = statusTable.map(([name, color, category]) => ({
    id: uuid7(),
    name,
    color,
    category,
  }));
  return { choices, choiceOrder: choices.map((c) => c.id) };
}
// Declarative description of one property (column) to create when seeding.
type PropertyDef = {
  name: string;
  type: string;
  isPrimary?: boolean; // marks the primary (title) column
  typeOptions?: any; // type-specific config, e.g. select choices
};
// Declares the full seeded property set: one primary text column plus a
// representative spread of every generatable type (select, multi-select,
// date, number, checkbox, url, email) and two system columns.
function buildPropertyDefinitions(): PropertyDef[] {
  // Wraps a choice list in the { choices, choiceOrder } shape that
  // select-style properties expect.
  const selectOptions = (choices: any[]) => ({
    choices,
    choiceOrder: choices.map((c) => c.id),
  });
  const priority = makeChoices(['Low', 'Medium', 'High', 'Critical']);
  const category = makeChoices([
    'Engineering',
    'Design',
    'Marketing',
    'Sales',
    'Support',
    'Operations',
  ]);
  const tags = makeChoices([
    'Bug',
    'Feature',
    'Improvement',
    'Documentation',
    'Research',
  ]);
  const statusOpts = makeStatusChoices();
  return [
    { name: 'Title', type: 'text', isPrimary: true },
    { name: 'Status', type: 'status', typeOptions: statusOpts },
    { name: 'Priority', type: 'select', typeOptions: selectOptions(priority) },
    { name: 'Category', type: 'select', typeOptions: selectOptions(category) },
    { name: 'Tags', type: 'multiSelect', typeOptions: selectOptions(tags) },
    {
      name: 'Due Date',
      type: 'date',
      typeOptions: { dateFormat: 'YYYY-MM-DD', includeTime: false },
    },
    {
      name: 'Estimate',
      type: 'number',
      typeOptions: { format: 'plain', precision: 1 },
    },
    {
      name: 'Budget',
      type: 'number',
      typeOptions: { format: 'currency', precision: 2, currencySymbol: '$' },
    },
    { name: 'Approved', type: 'checkbox' },
    { name: 'Website', type: 'url' },
    { name: 'Contact Email', type: 'email' },
    { name: 'Notes', type: 'text' },
    { name: 'Created', type: 'createdAt' },
    { name: 'Last Edited', type: 'lastEditedAt' },
  ];
}
// A generator produces one random cell value each time it is called.
type CellGenerator = () => unknown;

// Returns a value generator for the given property, or null when the type
// is system-maintained (SKIP_TYPES), unknown, or has no choices to pick.
function buildCellGenerator(property: any): CellGenerator | null {
  if (SKIP_TYPES.has(property.type)) {
    return null;
  }
  const options = property.type_options;
  // Uniform integer in [0, bound) — exactly one Math.random() per call.
  const randomIndex = (bound: number) => Math.floor(Math.random() * bound);
  switch (property.type) {
    case 'text':
      return () => randomWords(2, 6);
    case 'number':
      // Value in [0, 10000] rounded to two decimals.
      return () => Math.round(Math.random() * 10000 * 100) / 100;
    case 'select':
    case 'status': {
      const choices = options?.choices ?? [];
      if (choices.length === 0) {
        return null;
      }
      return () => choices[randomIndex(choices.length)].id;
    }
    case 'multiSelect': {
      const choices = options?.choices ?? [];
      if (choices.length === 0) {
        return () => [];
      }
      return () => {
        // Pick 1-3 distinct choices via a cheap shuffle-and-slice.
        const take = 1 + randomIndex(Math.min(3, choices.length));
        const shuffled = [...choices].sort(() => Math.random() - 0.5);
        return shuffled.slice(0, take).map((c: any) => c.id);
      };
    }
    case 'date': {
      const rangeStart = new Date(2020, 0, 1).getTime();
      const rangeSpan = new Date(2026, 0, 1).getTime() - rangeStart;
      return () => new Date(rangeStart + Math.random() * rangeSpan).toISOString();
    }
    case 'checkbox':
      return () => Math.random() > 0.5;
    case 'url':
      return () => `https://example.com/page/${randomIndex(100000)}`;
    case 'email':
      return () => `user${randomIndex(100000)}@example.com`;
    default:
      return null;
  }
}
/**
 * Creates the seed base, its property columns, and a default table view,
 * logging progress to stdout. Uses the module-level `db` handle;
 * TOTAL_ROWS is used only for the display name.
 *
 * @param workspaceId - workspace the base belongs to
 * @param spaceId - space the base lives in
 * @param creatorId - user recorded as creator, or null when none found
 * @returns the id of the newly created base
 */
async function createBase(workspaceId: string, spaceId: string, creatorId: string | null): Promise<string> {
  const baseId = uuid7();
  const rowCountLabel = TOTAL_ROWS >= 1000 ? `${Math.round(TOTAL_ROWS / 1000)}K` : `${TOTAL_ROWS}`;
  const baseName = `Seed Base ${rowCountLabel} rows`;
  await db.insertInto('bases').values({
    id: baseId,
    name: baseName,
    space_id: spaceId,
    workspace_id: workspaceId,
    creator_id: creatorId,
    created_at: new Date(),
    updated_at: new Date(),
  }).execute();
  console.log(`Created base: ${baseName}`);
  console.log(`Base ID: ${baseId}\n`);
  // Create properties — each gets a fractional position key generated
  // after the previous one, so column order matches declaration order.
  const propertyDefs = buildPropertyDefinitions();
  let propPosition: string | null = null;
  const insertedProperties: any[] = [];
  for (const def of propertyDefs) {
    propPosition = generateJitteredKeyBetween(propPosition, null);
    const prop = {
      id: uuid7(),
      base_id: baseId,
      name: def.name,
      type: def.type,
      position: propPosition,
      type_options: def.typeOptions ?? null,
      is_primary: def.isPrimary ?? false,
      workspace_id: workspaceId,
      created_at: new Date(),
      updated_at: new Date(),
    };
    insertedProperties.push(prop);
  }
  await db.insertInto('base_properties').values(insertedProperties).execute();
  console.log(`Created ${insertedProperties.length} properties:`);
  for (const p of insertedProperties) {
    console.log(`  - ${p.name} (${p.type})${p.is_primary ? ' [primary]' : ''}${SKIP_TYPES.has(p.type) ? ' [system]' : ''}`);
  }
  // Create default view
  const viewId = uuid7();
  await db.insertInto('base_views').values({
    id: viewId,
    base_id: baseId,
    name: 'Table View 1',
    type: 'table',
    position: generateJitteredKeyBetween(null, null),
    config: {},
    workspace_id: workspaceId,
    creator_id: creatorId,
    created_at: new Date(),
    updated_at: new Date(),
  }).execute();
  console.log(`Created view: Table View 1\n`);
  return baseId;
}
async function main() { async function main() {
const spaceId = '019c69a3-dd47-7014-8b87-ec8f167577ee'; const spaceId = '019c69a3-dd47-7014-8b87-ec8f167577ee';
@@ -247,75 +45,26 @@ async function main() {
.limit(1) .limit(1)
.executeTakeFirst(); .executeTakeFirst();
const creatorId = user?.id ?? null; const creatorUserId = user?.id ?? null;
console.log(`Workspace: ${workspaceId}`); console.log(`Workspace: ${workspaceId}`);
console.log(`Space: ${spaceId}`); console.log(`Space: ${spaceId}`);
console.log(`Creator: ${creatorId ?? '(none)'}\n`); console.log(`Creator: ${creatorUserId ?? '(none)'}\n`);
// Create the base with properties and view
const baseId = await createBase(workspaceId, spaceId, creatorId);
// Load the created properties for cell generation
const properties = await db
.selectFrom('base_properties')
.selectAll()
.where('base_id', '=', baseId)
.execute();
const generators: Array<{ propertyId: string; generate: CellGenerator }> = [];
for (const prop of properties) {
const gen = buildCellGenerator(prop);
if (gen) {
generators.push({ propertyId: prop.id, generate: gen });
}
}
console.log(`Generating ${TOTAL_ROWS.toLocaleString()} positions...`);
let lastPosition: string | null = null;
const positions: string[] = new Array(TOTAL_ROWS);
for (let i = 0; i < TOTAL_ROWS; i++) {
lastPosition = generateJitteredKeyBetween(lastPosition, null);
positions[i] = lastPosition;
}
console.log(`Positions generated (last: ${positions[positions.length - 1]})\n`);
const startTime = Date.now(); const startTime = Date.now();
const totalBatches = Math.ceil(TOTAL_ROWS / BATCH_SIZE); const { baseId } = await seedBase({
db,
for (let batchStart = 0; batchStart < TOTAL_ROWS; batchStart += BATCH_SIZE) { workspaceId,
const batchEnd = Math.min(batchStart + BATCH_SIZE, TOTAL_ROWS); spaceId,
const rows: any[] = []; creatorUserId,
rows: TOTAL_ROWS,
for (let i = batchStart; i < batchEnd; i++) { });
const cells: Record<string, unknown> = {};
for (const { propertyId, generate } of generators) {
cells[propertyId] = generate();
}
rows.push({
id: uuid7(),
base_id: baseId,
cells,
position: positions[i],
creator_id: creatorId,
workspace_id: workspaceId,
created_at: new Date(),
updated_at: new Date(),
});
}
await db.insertInto('base_rows').values(rows).execute();
const batchNum = Math.floor(batchStart / BATCH_SIZE) + 1;
const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`Batch ${batchNum}/${totalBatches} inserted (${batchEnd.toLocaleString()} rows, ${elapsed}s elapsed)`);
}
const totalElapsed = ((Date.now() - startTime) / 1000).toFixed(1); const totalElapsed = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nDone. Inserted ${TOTAL_ROWS.toLocaleString()} rows in ${totalElapsed}s`);
console.log(`\nBase ID: ${baseId}`); console.log(
`Inserted ${TOTAL_ROWS.toLocaleString()} rows in ${totalElapsed}s`,
);
console.log(`Base ID: ${baseId}`);
await db.destroy(); await db.destroy();
process.exit(0); process.exit(0);