Mirror of https://github.com/docmost/docmost.git (synced 2026-05-10 00:13:36 +08:00)

Compare commits: 52 commits
@@ -1,309 +0,0 @@

# Base `page` Property Type — Design Spec

**Date:** 2026-04-20
**Status:** Draft
**Feature area:** `apps/server/src/core/base`, `apps/client/src/features/base`, `apps/server/src/core/page`

## Goal

Add a new base property type `page` that lets a user search for and link **one existing page** per cell. Modeled on how the editor's `@` page-mention works — the picker searches existing pages workspace-wide (with current-space prioritized) and the cell renders a live pill with the page's icon and title. No page is auto-created from the picker; users can only link pages that already exist.

Why: today users who want a page-reference column would have to paste a URL into a `url` cell, which loses the icon + title and doesn't validate. We also want to avoid the Focalboard-style pattern of auto-creating a page-row per table row, which would bloat the pages tree.

## Non-goals (v1)

- **Multiple pages per cell.** Single page only. Forward-compatible: the schema widens trivially to `z.union([z.uuid(), z.array(z.uuid())])` + an `allowMultiple` type option later, with zero data migration (see "Future extension" below).
- **Sorting by page title.** Would require a JOIN against `pages` in the row-list query; skip in v1. Filter suffices.
- **Creating pages from within the picker.**
- **Cross-workspace page linking.**
- **Rich previews / hover cards** showing page excerpts — pill-only.
- **Confluence-style section grouping** in the property type picker (e.g. the "Page and live doc" section in the screenshot). Flat list for v1; grouping is a separate polish task.

## UX overview

### Picker (edit mode)

- Popover modeled on [cell-person.tsx](../../../apps/client/src/features/base/components/cells/cell-person.tsx) but stripped for single-select. `width=300`, `position="bottom-start"`, `trapFocus`.
- Top: search input, auto-focused. If a page is currently linked, a removable "tag" for it sits above the search (same shape as `personTag`).
- Body: results list (max 25), fed by `searchSuggestions({ query, includePages: true, spaceId: base.spaceId, limit: 25 })` — reuses the existing suggestion endpoint, which prioritizes `spaceId` results.
- Each row: `{icon or IconFileDescription} {title}` + muted space name on the right (so cross-space picks are visually distinct).
- Empty-query state: if pulling recent-pages is easy to plug in, show recent pages; otherwise "Type to search…" hint.
- Click or Enter on a highlighted row → `onCommit(pageId)`, popover closes.
- Esc / click-outside → `onCancel`.
- Clicking the "Remove" affordance on the current tag → `onCommit(null)`.
- Keyboard: reuse `useListKeyboardNav`.

### View mode

- Empty cell → empty placeholder (same class as `cellClasses.emptyValue`).
- Resolved page → pill `{icon or IconFileDescription} {title}`, anchor that navigates to `buildPageUrl(space.slug, slugId, title)` using the helper that [mention-view.tsx](../../../apps/client/src/features/editor/components/mention/mention-view.tsx) already uses.
- Unresolved (deleted or viewer has no access) → greyed pill "Page not found", no link, `aria-disabled`.
- Single click on the pill = navigate. Double-click on the cell = open picker (the same rule grid-cell applies to other types).

### Sort / filter UI

- [view-sort-config.tsx](../../../apps/client/src/features/base/components/views/view-sort-config.tsx): exclude `page` properties from the sortable set.
- [view-filter-config.tsx](../../../apps/client/src/features/base/components/views/view-filter-config.tsx): filter editor branch for `page` with operators `isEmpty`, `isNotEmpty`, `any`, `none`. The value picker reuses the same search dropdown from the cell picker.

## Data model

### Cell value

- **Stored shape:** `string` (page UUID) or `null`. Parallels `person` in single mode.
- **Example:** `{ "01998b7e-...": "01998b80-..." }` — property UUID → page UUID.

### Property type options

- **v1:** empty `{}` (reuse `emptyTypeOptionsSchema`).
- **Future:** `{ allowMultiple?: boolean }`.

### Schema additions

**Server — [base.schemas.ts](../../../apps/server/src/core/base/base.schemas.ts):**

```ts
export const BasePropertyType = {
  // ...existing entries...
  PAGE: 'page',
} as const;

// typeOptionsSchemaMap
[BasePropertyType.PAGE]: emptyTypeOptionsSchema,

// cellValueSchemaMap
[BasePropertyType.PAGE]: z.uuid(),
```

**Client — [base.types.ts](../../../apps/client/src/features/base/types/base.types.ts):**

```ts
export type BasePropertyType = ... | 'page';
export type PageTypeOptions = Record<string, never>;
```

### Property kind & engine

**[engine/kinds.ts](../../../apps/server/src/core/base/engine/kinds.ts):**

```ts
export const PropertyKind = {
  // ...existing...
  PAGE: 'page',
} as const;

// propertyKind()
case BasePropertyType.PAGE:
  return PropertyKind.PAGE;
```

**[engine/predicate.ts](../../../apps/server/src/core/base/engine/predicate.ts):** new `pageCondition()` handler — shape follows `selectCondition()` (single UUID stored as text):

- `isEmpty` / `isNotEmpty` → `textCell` is null or empty
- `eq` / `neq` → text equality / inequality (null-safe for `neq`)
- `any` → `textCell IN (...)`
- `none` → `textCell NOT IN (...)` or null
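
A minimal sketch of that handler, assuming a Kysely `ExpressionBuilder`; the `textCell` column name and the condition shape are assumptions taken from the list above, and the real implementation should mirror `selectCondition()`:

```ts
import { ExpressionBuilder } from 'kysely';

// Sketch only — illustrates the operator mapping above, not the final handler.
type PageCond = { op: string; value?: string | string[] };

function pageCondition(eb: ExpressionBuilder<any, any>, cond: PageCond) {
  const values = Array.isArray(cond.value) ? cond.value : cond.value ? [cond.value] : [];
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([eb('textCell', 'is', null), eb('textCell', '=', '')]);
    case 'isNotEmpty':
      return eb.and([eb('textCell', 'is not', null), eb('textCell', '!=', '')]);
    case 'eq':
      return eb('textCell', '=', cond.value);
    case 'neq':
      // null-safe: an empty cell also counts as "not equal"
      return eb.or([eb('textCell', 'is', null), eb('textCell', '!=', cond.value)]);
    case 'any':
      return eb('textCell', 'in', values);
    case 'none':
    default:
      return eb.or([eb('textCell', 'is', null), eb('textCell', 'not in', values)]);
  }
}
```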

Wired into the `switch (kind)` in `buildCondition`:

```ts
case PropertyKind.PAGE:
  return pageCondition(eb, cond);
```

**[engine/sort.ts](../../../apps/server/src/core/base/engine/sort.ts):** no new branch. `page` falls into the default text-sentinel path (sorts by raw UUID string, which is unhelpful but harmless — the sort UI won't expose this type in v1).

### Type conversion

**[base.schemas.ts `CellConversionContext`](../../../apps/server/src/core/base/base.schemas.ts:191):** add a new field:

```ts
export type CellConversionContext = {
  fromTypeOptions?: unknown;
  userNames?: Map<string, string>;
  attachmentNames?: Map<string, string>;
  pageTitles?: Map<string, string>; // NEW
};
```

**[base-type-conversion.task.ts](../../../apps/server/src/core/base/tasks/base-type-conversion.task.ts):** when `fromType === 'page'`, batch-load titles via the same page repo path used by the new resolver endpoint (see below) and populate `ctx.pageTitles`.

**`attemptCellConversion` branches:**
- `page → text`: resolve `ctx.pageTitles.get(uuid)` → title (or `""` if missing).
- `page → *` (anything else): return `{converted: true, value: null}`.
- `* → page`: return `{converted: true, value: null}` (free text or other IDs can't be coerced to a valid page UUID).
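
A sketch of those branches, assuming `attemptCellConversion` receives the source/target types, the raw cell value, and the `CellConversionContext` above (the actual signature lives in the existing conversion code and may differ):

```ts
// Sketch — illustrates the three rules above, not the real function body.
function convertPageCell(
  fromType: string,
  toType: string,
  value: unknown,
  ctx: CellConversionContext,
): { converted: boolean; value: unknown } {
  // page → text: look the title up in the batch-loaded map, fall back to "".
  if (fromType === 'page' && toType === 'text') {
    const title = typeof value === 'string' ? ctx.pageTitles?.get(value) ?? '' : '';
    return { converted: true, value: title };
  }
  // page → anything else, or anything else → page: the value can't be coerced
  // into (or out of) a valid page UUID, so clear the cell.
  return { converted: true, value: null };
}
```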

## Server: page resolver endpoint

New endpoint for cell hydration on the client. Reusing `/pages/info` is inappropriate — it returns full page content and handles only one page per request.

### `POST /bases/pages/resolve`

**Request:**
```ts
{ pageIds: string[] } // 1 <= length <= 100, enforced server-side; 400 on violation
```

**Response:**
```ts
{
  items: Array<{
    id: string;
    slugId: string;
    title: string | null;
    icon: string | null;
    spaceId: string;
    space: { id: string; slug: string; name: string };
  }>;
}
```

### Behavior

1. Deduplicate input IDs.
2. Select from `pages` where `id IN (...)` AND `deletedAt IS NULL` AND `workspaceId = current`.
3. Filter the result set through `pagePermissionRepo.filterAccessiblePageIds({ pageIds, userId })` — same mechanism used by [search.service.ts:131-139](../../../apps/server/src/core/search/search.service.ts).
4. Join `spaces` to include `space.slug` and `space.name` for navigation.
5. Silently omit any ID the user can't see (deleted, restricted, cross-workspace). The client treats any requested ID missing from `items` as "Page not found".
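
A sketch of the core query under those rules, assuming a Kysely query builder and injected `db` / `pagePermissionRepo` dependencies; the column names follow the camelCase convention used elsewhere and are assumptions to verify against the real repos:

```ts
// Sketch of BasePageResolverService.resolvePagesForBase() — not the final API.
async resolvePagesForBase(pageIds: string[], workspaceId: string, userId: string) {
  const ids = [...new Set(pageIds)]; // 1. deduplicate
  const rows = await this.db
    .selectFrom('pages')
    .innerJoin('spaces', 'spaces.id', 'pages.spaceId') // 4. space slug/name for navigation
    .select([
      'pages.id', 'pages.slugId', 'pages.title', 'pages.icon', 'pages.spaceId',
      'spaces.slug as spaceSlug', 'spaces.name as spaceName',
    ])
    .where('pages.id', 'in', ids)             // 2. requested pages only
    .where('pages.deletedAt', 'is', null)      //    not trashed
    .where('pages.workspaceId', '=', workspaceId)
    .execute();
  // 3. permission filter; 5. anything not accessible is silently omitted.
  const accessible = new Set(
    await this.pagePermissionRepo.filterAccessiblePageIds({ pageIds: rows.map((r) => r.id), userId }),
  );
  return {
    items: rows
      .filter((r) => accessible.has(r.id))
      .map((r) => ({
        id: r.id, slugId: r.slugId, title: r.title, icon: r.icon, spaceId: r.spaceId,
        space: { id: r.spaceId, slug: r.spaceSlug, name: r.spaceName },
      })),
  };
}
```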

### Code layout

- **Controller:** add method to [base.controller.ts](../../../apps/server/src/core/base/controllers/base.controller.ts) at path `@Post('pages/resolve')`. Guarded by the same `JwtAuthGuard` + workspace check the rest of `/bases/*` uses.
- **Service:** new file `apps/server/src/core/base/services/base-page-resolver.service.ts` with `resolvePagesForBase(pageIds, workspaceId, userId)`. Keeps the coupling to `PageRepo` + `PagePermissionRepo` isolated to this one file.
- **Module:** wire the new service into [base.module.ts](../../../apps/server/src/core/base/base.module.ts). `PageRepo` + `PagePermissionRepo` are already shared modules.

## Client: cell component & resolver

### Batch resolver hook

New file `apps/client/src/features/base/queries/base-page-resolver-query.ts`:

```ts
export function useResolvedPages(pageIds: string[]): Map<string, ResolvedPage | null>
```

- Deduplicate + sort IDs to form a stable React Query key.
- Fetch `POST /bases/pages/resolve` with `{ pageIds }`.
- Return a `Map` keyed by every requested ID — `null` for any ID absent from the server response.
- `staleTime: 30_000`, `gcTime: 5 * 60_000`.
- Realtime invalidation: listen for existing page-level websocket events (rename, delete) and invalidate the query when a touched ID intersects our key. Exact event names to be surveyed during plan writing.
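
A sketch of the hook, assuming TanStack Query and an axios-style `api` helper (the import path for that helper is a placeholder, and the realtime invalidation wiring is omitted because the event names are still to be surveyed):

```ts
import { useMemo } from 'react';
import { useQuery } from '@tanstack/react-query';
import api from '@/lib/api-client'; // assumed HTTP client wrapper — adjust to the real helper

// Sketch — ResolvedPage mirrors the response item shape from the endpoint spec above.
export function useResolvedPages(pageIds: string[]): Map<string, ResolvedPage | null> {
  const ids = useMemo(() => [...new Set(pageIds)].sort(), [pageIds]); // stable query key
  const { data } = useQuery({
    queryKey: ['base-resolved-pages', ids],
    queryFn: async () => {
      const res = await api.post<{ items: ResolvedPage[] }>('/bases/pages/resolve', { pageIds: ids });
      return res.data.items;
    },
    enabled: ids.length > 0,
    staleTime: 30_000,
    gcTime: 5 * 60_000,
  });
  return useMemo(() => {
    const byId = new Map(data?.map((p) => [p.id, p]) ?? []);
    // every requested ID is present in the map; misses resolve to null ("Page not found")
    return new Map(ids.map((id) => [id, byId.get(id) ?? null]));
  }, [ids, data]);
}
```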

### Cell component

New file `apps/client/src/features/base/components/cells/cell-page.tsx`:

```ts
type CellPageProps = {
  value: unknown;
  property: IBaseProperty;
  rowId: string;
  isEditing: boolean;
  onCommit: (value: unknown) => void;
  onCancel: () => void;
};
```

**Behavior:**
- Parse value: accept `string` only (ignore arrays — they would come from the future multi-page mode and are dropped until that mode ships).
- `useResolvedPages([value])` — used even for single lookups; the hook dedupes internally so multiple cells sharing the same page ID hit one request.
- View mode: resolved → pill with icon+title, anchor to `buildPageUrl`. Unresolved → greyed "Page not found".
- Edit mode: popover picker (see UX overview). Search via existing `searchSuggestions`.
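
A sketch of the view-mode half of the component, assuming the `useResolvedPages` hook above plus the existing `buildPageUrl`, `cellClasses`, and `IconFileDescription` helpers; the picker half follows the cell-person pattern and is omitted:

```ts
// Sketch — view mode only; not the final component.
function CellPageView({ value }: { value: unknown }) {
  const pageId = typeof value === 'string' ? value : null; // ignore arrays / other shapes
  const resolved = useResolvedPages(pageId ? [pageId] : []);
  if (!pageId) return <span className={cellClasses.emptyValue} />;
  const page = resolved.get(pageId);
  if (!page) {
    // deleted page or no access — same fallback for both
    return <span aria-disabled="true">Page not found</span>;
  }
  return (
    <a href={buildPageUrl(page.space.slug, page.slugId, page.title ?? '')}>
      {page.icon ?? <IconFileDescription size={14} />} {page.title}
    </a>
  );
}
```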

Wire into [grid-cell.tsx](../../../apps/client/src/features/base/components/grid/grid-cell.tsx):

```ts
const cellComponents = {
  // ...existing...
  page: CellPage,
};
```

### Property type picker

[property-type-picker.tsx](../../../apps/client/src/features/base/components/property/property-type-picker.tsx): append one entry (after `file`):

```ts
{ type: "page", icon: IconFileDescription, labelKey: "Page" },
```

### Filter editor

[view-filter-config.tsx](../../../apps/client/src/features/base/components/views/view-filter-config.tsx): new branch for `page`:
- Operators: `isEmpty`, `isNotEmpty`, `any`, `none`.
- Value picker for `any`/`none`: reuses the same `searchSuggestions`-backed search dropdown from the cell picker — user picks one or more pages as filter operands.

### Sort editor

[view-sort-config.tsx](../../../apps/client/src/features/base/components/views/view-sort-config.tsx): exclude `page` from the list of sortable property types.

## Testing

### Server — unit

- **Schema:** `validateCellValue('page', uuid)` passes; with garbage string / number → fails; with `null` → passes (null = empty).
- **Conversion:**
  - `attemptCellConversion('page', 'text', uuid, { pageTitles: Map<uuid,title> })` → resolved title.
  - Same call with empty `pageTitles` → `""`.
  - `page → number/date/select/…` → `{converted: true, value: null}`.
  - `text → page` with any string input → `{converted: true, value: null}`.
- **Predicate:** for each operator (`isEmpty`, `isNotEmpty`, `eq`, `neq`, `any`, `none`), `pageCondition()` returns the expected Kysely expression shape.

### Server — integration

- **Resolver endpoint `POST /bases/pages/resolve`:**
  - valid IDs in an accessible space → present in `items`
  - deleted pages (trash) → absent
  - pages in a space the user isn't a member of → absent
  - pages in another workspace → absent
  - empty array → 400
  - array length > 100 → 400
- **Row CRUD:** create a property of type `page`, write a cell with a UUID, read back → round-trip shape is `string`.
- **View filter:** create a view config with `{ op: 'any', propertyId, value: [uuidA, uuidB] }`, hit row-list, verify only matching rows returned.

### Client — unit (Vitest + React Testing Library)

- `cell-page.test.tsx`:
  - view mode with resolved page → renders pill with icon + title and an `<a>` to the computed URL
  - view mode with unresolved page (null in resolver map) → renders greyed "Page not found", no `<a>`
  - double-click opens picker
  - Enter on highlighted result commits `pageId`
  - Esc cancels
  - Remove tag button commits `null`
- `base-page-resolver-query.test.ts`:
  - dedupes IDs
  - stable query key across re-renders with same set
  - missing IDs render as `null` in the returned map

### Manual QA checklist

- Link a page in the same space.
- Link a page in another space → pill shows, picker shows muted space-name hint.
- Remove link → cell empties.
- Delete linked page (via trash) → cell flips to "Page not found" on next resolver refetch.
- Viewer loses space access → same "Page not found" fallback.
- Rename linked page → within ≤30s (staleTime) the pill reflects the new title; realtime event should also trigger refetch.
- Filter: `isEmpty`, `isNotEmpty`, `any` (multi-select), `none`.
- Conversion `page → text` populates cells with page titles.
- Conversion `text → page` wipes cells.

## Rollout

- **No DB migration.** All changes are code-only: new enum value, new cell-value validator entry, new engine kind branch, new endpoint.
- **No feature flag.** The type appears in the picker as soon as the build ships. Backwards-compatible since `'page'` is a new type identifier.
- Existing bases continue to work unchanged.

## Risks & open questions

- **30s staleTime.** Renames take up to 30s to propagate without realtime invalidation. The realtime hook should shrink this to near-zero in practice; verify in QA. If it feels slow, drop `staleTime` to `0` and rely solely on realtime + refetch-on-window-focus.
- **"Page not found" label.** i18n-friendly; run through the translation pipeline. Consider whether to differentiate deleted vs. restricted — current answer: no, one label covers both and matches Confluence's behavior.
- **Cross-space name exposure.** The picker surfaces the space name of pages the user can access cross-space. This is already exposed via the existing page-mention flow, so no new exposure, but flag in review.

## Future extension (multiple pages per cell)

When `allowMultiple` lands:

1. Widen cell-value schema: `z.uuid()` → `z.union([z.uuid(), z.array(z.uuid())])`. Existing single-UUID cells continue to validate.
2. Add `allowMultiple` boolean to `pageTypeOptionsSchema` (default `false` for existing properties).
3. In [predicate.ts](../../../apps/server/src/core/base/engine/predicate.ts), branch `pageCondition` on `allowMultiple`: `true` → reuse `arrayOfIdsCondition`; `false` → keep the current text-based path.
4. Client cell normalizes on read (`Array.isArray(value) ? value : typeof value === 'string' ? [value] : []`), mirrors [cell-person.tsx:33](../../../apps/client/src/features/base/components/cells/cell-person.tsx).
5. No data writes required for existing cells.
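
For illustration, a sketch of what the widened schema and client-side normalization could look like at that point, assuming the Zod version in use exposes `z.uuid()` as the snippets above do:

```ts
import { z } from 'zod';

// Server — sketch of the forward-compatible cell value and type options.
const pageCellValueSchema = z.union([z.uuid(), z.array(z.uuid())]);
const pageTypeOptionsSchema = z.object({ allowMultiple: z.boolean().optional() });

// Client — normalize whatever is stored into an array of page IDs.
function normalizePageCell(value: unknown): string[] {
  return Array.isArray(value) ? value : typeof value === 'string' ? [value] : [];
}
```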

This spec leaves room for that change without locking the storage shape.
@@ -1,479 +0,0 @@

# Base View Draft (Local-First Filter & Sort) — Design Spec

**Date:** 2026-04-20
**Status:** Draft
**Feature area:** `apps/client/src/features/base` (client-only)

## Goal

Make filter and sort changes on a base view **local-first**: they apply instantly for the editing user, are scoped to their own browser/profile, and never touch the server baseline until the user explicitly clicks "Save for everyone". A banner at the top of the table surfaces the draft state and lets the user either promote the draft to the shared baseline or discard it.

This removes the current Notion-unlike behavior where every filter/sort tweak is auto-persisted and immediately inflicted on every teammate viewing the same view.

## Non-goals (v1)

- **Column layout in draft mode.** Column visibility, order, and widths continue to flow through the existing debounced `persistViewConfig` path in [use-base-table.ts:371-396](../../../apps/client/src/features/base/hooks/use-base-table.ts). No draft behavior for them. (Listed as a future extension.)
- **Server-side per-user drafts.** localStorage only. A user clearing their browser storage, switching devices, or using a different browser profile loses drafts — by design.
- **"Save as new view".** The screenshot hints at a dropdown caret next to the Save button for a "save as new view" split-action. Not in v1.
- **Kanban / calendar.** Only the `table` view type exists today; spec scopes to it but the hook is type-agnostic and will apply trivially when other view types land.
- **Automatic garbage collection of stale drafts.** Drafts persist indefinitely until the user resets or saves. No TTL, no eager cleanup when baseline values match the draft.
- **Conflict UI.** If another user writes a new baseline while I have local drafts, my draft silently wins on my client. No "baseline changed" warning.

## UX overview

### Draft banner

Placement: **between** the page title and [BaseToolbar](../../../apps/client/src/features/base/components/base-toolbar.tsx), inside [base-table.tsx](../../../apps/client/src/features/base/components/base-table.tsx) above the `<BaseToolbar />` node (around [base-table.tsx:192](../../../apps/client/src/features/base/components/base-table.tsx)). The banner is part of the table's own layout, not a workspace-level chrome element, because it's tied to a specific view.

Render condition: `isDirty === true` (see "Dirty check").

Layout (match the reference screenshot):

- Mantine `<Paper withBorder radius="sm" px="md" py="xs">` with a soft background (`bg="yellow.0"` or `bg="orange.0"` depending on theme palette — pick whichever tolerates dark mode) and a small info icon on the left.
- Left region: short message — `t("Filter and sort changes are visible only to you.")`.
- Right region (a `<Group gap="sm">`):
  - `<Button variant="subtle" color="gray" size="xs">{t("Reset")}</Button>` — underline-on-hover "text link" feel; wipes the draft.
  - `<Button variant="filled" size="xs">{t("Save for everyone")}</Button>` — primary accent (project's default theme color — orange in the screenshot maps to Mantine's configured `primaryColor`, so `color` is omitted and the theme default is used).
- The "Save for everyone" button is **omitted entirely** for users without edit permission (see "Permission gating"). "Reset" always shows.
- The banner never animates in/out on every keystroke — it only appears/disappears when `isDirty` flips. Add a Mantine `<Transition mounted={isDirty} transition="slide-down" duration={120}>` wrap if the flip is jarring; otherwise mount unconditionally with a `{isDirty && ...}` guard.

### Filter/sort editors in draft mode

No UI affordance changes inside the filter or sort popovers themselves. They keep the same open-on-click, add/remove/edit flow. The only behavioral change is that their `onChange` callback writes to the draft store rather than firing `updateView` — completely transparent to the editor components.

### Reset behavior

Click Reset → the draft hook removes its localStorage entry → the table re-renders reading filter/sorts from `activeView.config` (the server baseline). Any currently-open filter/sort popover closes on outside click as usual; if it's open when the user clicks Reset, the next render shows the baseline values. No notification — the banner disappearing is sufficient feedback.

### Save for everyone

Click Save → call the existing `useUpdateViewMutation` from [base-view-query.ts:43-112](../../../apps/client/src/features/base/queries/base-view-query.ts) with `{ viewId, baseId, config: { ...serverBaseline, filter: draft.filter, sorts: draft.sorts } }`. On success, clear the localStorage key and show a Mantine notification `t("View updated for everyone")`. On error, keep the draft; the mutation already wires the error toast.

### Permission gating

A user can edit this base iff their space membership grants `SpaceCaslAction.Edit, SpaceCaslSubject.Base` — the same check the server enforces in [base-view.controller.ts:68](../../../apps/server/src/core/base/controllers/base-view.controller.ts). Viewers still get local drafts (the entire point is that local changes don't require edit permission), but their "Save for everyone" button is hidden.

**Client caveat:** [permissions.type.ts](../../../apps/client/src/features/space/permissions/permissions.type.ts) currently only exports `Settings`, `Member`, and `Page` subjects. The server enum has `Base` but the client enum doesn't. The spec adds `Base = "base"` to `SpaceCaslSubject` and widens the `SpaceAbility` union — that's a one-line change plus import fix.
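
A sketch of that additive client change; the existing member names and string values are assumptions inferred from the caveat above and should be checked against the real file:

```ts
// apps/client/src/features/space/permissions/permissions.type.ts — sketch of the one-line addition.
export enum SpaceCaslSubject {
  Settings = "settings",
  Member = "member",
  Page = "page",
  Base = "base", // NEW — mirrors the server-side subject so `can(Edit, Base)` type-checks
}
```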

## Data model

### localStorage key

```
docmost:base-view-draft:v1:{userId}:{baseId}:{viewId}
```

- Namespace prefix `docmost:base-view-draft:` keeps us from colliding with other consumers.
- `v1` is the schema version, so a future breaking change can shed old entries simply by skipping keys with an older version.
- `{userId}` scopes drafts so a shared-device login-swap doesn't leak drafts across accounts. `userId` comes from the existing `useCurrentUser()` hook (returns `{ data: ICurrentUser }` — read `user?.user.id`), the same helper used by other authenticated client code.
- `{baseId}` and `{viewId}` together uniquely identify which table state the draft applies to.

### Value shape

```ts
// apps/client/src/features/base/types/base.types.ts (additive)
export type BaseViewDraft = {
  filter?: FilterGroup;
  sorts?: ViewSortConfig[];
  updatedAt: string; // ISO timestamp, written on each put — used only for diagnostics
};
```

Both `filter` and `sorts` are optional, independently. An absent field means "inherit baseline for that axis". That matters because a user who's only dirtied sorts but not filters should see the baseline filter unchanged if the baseline's filter later shifts.

Serialized as JSON by Jotai's `atomWithStorage` (which JSON-stringifies on write and parses on read). No schema validation on read — if the parse fails or the shape looks wrong, Jotai yields `null` and the hook falls back to baseline.

## Client architecture

### Storage atom family

**File:** `apps/client/src/features/base/atoms/view-draft-atom.ts`

Follow the existing Jotai storage pattern in [home-tab-atom.ts](../../../apps/client/src/features/home/atoms/home-tab-atom.ts) and [auth-tokens-atom.ts](../../../apps/client/src/features/auth/atoms/auth-tokens-atom.ts) — `atomWithStorage` is the codebase convention for localStorage-backed state. Since our key is dynamic per (user, base, view), pair it with `atomFamily` from `jotai/utils`:

```ts
import { atomFamily, atomWithStorage } from "jotai/utils";
import { BaseViewDraft } from "@/features/base/types/base.types";

export type ViewDraftKey = {
  userId: string;
  baseId: string;
  viewId: string;
};

const keyFor = (k: ViewDraftKey) =>
  `docmost:base-view-draft:v1:${k.userId}:${k.baseId}:${k.viewId}`;

export const viewDraftAtomFamily = atomFamily(
  (k: ViewDraftKey) =>
    atomWithStorage<BaseViewDraft | null>(keyFor(k), null),
  (a, b) =>
    a.userId === b.userId && a.baseId === b.baseId && a.viewId === b.viewId,
);
```

`atomWithStorage` handles JSON serialization, cross-tab sync via the `storage` event, and SSR-safe lazy reads out of the box — no hand-rolled `localStorage.getItem/setItem` or `window.addEventListener("storage", ...)` needed. The comparator passed as `atomFamily`'s second argument ensures the same (user, base, view) triple always resolves to the same atom instance, so React Query-style object identity issues don't cause atoms to be recreated per render.

### Hook: `useViewDraft`

**File:** `apps/client/src/features/base/hooks/use-view-draft.ts`

Thin wrapper that binds the atom family to the rendering layer, adds the passthrough-when-undefined guard, and derives `effectiveFilter` / `effectiveSorts` / `isDirty` / `buildPromotedConfig` from the atom's value:

```ts
export type ViewDraftState = {
  draft: BaseViewDraft | null;
  effectiveFilter: FilterGroup | undefined;
  effectiveSorts: ViewSortConfig[] | undefined;
  isDirty: boolean;
  setFilter: (filter: FilterGroup | undefined) => void;
  setSorts: (sorts: ViewSortConfig[] | undefined) => void;
  reset: () => void;
  buildPromotedConfig: (baseline: ViewConfig) => ViewConfig;
};

export function useViewDraft(args: {
  userId: string | undefined;
  baseId: string | undefined;
  viewId: string | undefined;
  baselineFilter: FilterGroup | undefined;
  baselineSorts: ViewSortConfig[] | undefined;
}): ViewDraftState;
```

**Behavior:**

1. If any of `userId / baseId / viewId` is undefined → return a passthrough state (`draft=null`, `isDirty=false`, setters no-op, `effective*` fall through to baseline). Guards the initial-load window where auth / activeView hasn't resolved yet.
2. Otherwise, `useAtom(viewDraftAtomFamily({ userId, baseId, viewId }))` gives `[draft, setDraft]`. Jotai reads from localStorage on first access and writes on every set.
3. `setFilter(next)` and `setSorts(next)` compute `merged = { ...(draft ?? {}), [axis]: next, updatedAt: new Date().toISOString() }`. If the result has both `filter` and `sorts` back to `undefined` (the user cleared all local divergence), call `setDraft(RESET)` instead of writing an empty object. (`RESET` is `jotai/utils`' sentinel — it removes the key from localStorage.) This keeps "orphan" drafts from lingering.
4. `reset()` is `setDraft(RESET)`.
5. `isDirty` is `draft !== null && (!shallowEqualFilter(draft.filter, baselineFilter) || !shallowEqualSorts(draft.sorts, baselineSorts))`. Note the per-axis `??` fallback doesn't appear here because `null/undefined` is the "no local divergence" signal for that axis; only a defined-and-different value counts as dirty.
6. `buildPromotedConfig(baseline)` returns `{ ...baseline, filter: draft?.filter ?? baseline.filter, sorts: draft?.sorts ?? baseline.sorts }`. Preserves all non-draft config fields (widths, order, visibility) and only overwrites the two axes that may have diverged.
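
A compact sketch of the hook body under those rules, assuming the `filterEq` / `sortsEq` helpers defined in the "Dirty check" section below; the fallback-key trick for the disabled case and the exact guard order are illustrative, not prescriptive:

```ts
// Sketch — not the final implementation.
import { useAtom } from "jotai";
import { RESET } from "jotai/utils";

export function useViewDraft(args: UseViewDraftArgs): ViewDraftState {
  const { userId, baseId, viewId, baselineFilter, baselineSorts } = args;
  const enabled = Boolean(userId && baseId && viewId);
  // Hooks can't be called conditionally, so fall back to a throwaway key while disabled.
  const [draft, setDraft] = useAtom(
    viewDraftAtomFamily({ userId: userId ?? "", baseId: baseId ?? "", viewId: viewId ?? "" }),
  );
  const put = (patch: Partial<BaseViewDraft>) => {
    if (!enabled) return; // passthrough window: auth / activeView not resolved yet
    const merged = { ...(draft ?? {}), ...patch, updatedAt: new Date().toISOString() };
    // both axes cleared → remove the key instead of storing an orphan object
    if (merged.filter === undefined && merged.sorts === undefined) setDraft(RESET);
    else setDraft(merged as BaseViewDraft);
  };
  const isDirty =
    enabled &&
    draft !== null &&
    ((draft.filter !== undefined && !filterEq(draft.filter, baselineFilter)) ||
      (draft.sorts !== undefined && !sortsEq(draft.sorts, baselineSorts)));
  return {
    draft: enabled ? draft : null,
    effectiveFilter: (enabled ? draft?.filter : undefined) ?? baselineFilter,
    effectiveSorts: (enabled ? draft?.sorts : undefined) ?? baselineSorts,
    isDirty,
    setFilter: (filter) => put({ filter }),
    setSorts: (sorts) => put({ sorts }),
    reset: () => setDraft(RESET),
    buildPromotedConfig: (baseline) => ({
      ...baseline,
      filter: draft?.filter ?? baseline.filter,
      sorts: draft?.sorts ?? baseline.sorts,
    }),
  };
}
```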

**Return composition:**

- `effectiveFilter = draft?.filter ?? baselineFilter`
- `effectiveSorts = draft?.sorts ?? baselineSorts`

**Cross-tab sync is free.** `atomWithStorage` subscribes to the `storage` event internally — a filter change in tab A triggers a re-render in tab B with no extra code. No manual listener required.

### Integration into `useBaseTable` and `base-table.tsx`

`useBaseTable` at [use-base-table.ts:224](../../../apps/client/src/features/base/hooks/use-base-table.ts) currently derives the table's initial sort from `activeView.config.sorts`. In the new world the table's sort/filter state must come from the **effective** values (draft-or-baseline), not the raw `activeView.config`.

Two cut options were considered:

**Option A (chosen): drive from effective values via props.** `useBaseTable` takes an additional `effectiveConfig?: ViewConfig` parameter (or, cleaner, the caller passes a shallow-merged `activeView` whose `config` is `{ ...activeView.config, filter: effective.filter, sorts: effective.sorts }`). `buildSortingState` and the row query already read from `activeView.config`, so the cleanest shape is to mutate the config the hook receives, not to introduce a new parameter.

**Option B (rejected): thread draft deep into `useBaseTable`.** Adds the concept of drafts to a hook that only cares about the rendered state. Muddies responsibilities.

Going with A. In [base-table.tsx](../../../apps/client/src/features/base/components/base-table.tsx):

```ts
// NEW: wire the draft hook
const { data: user } = useCurrentUser();
const { draft, effectiveFilter, effectiveSorts, isDirty, setFilter, setSorts, reset, buildPromotedConfig } =
  useViewDraft({
    userId: user?.user.id,
    baseId,
    viewId: activeView?.id,
    baselineFilter: activeView?.config?.filter,
    baselineSorts: activeView?.config?.sorts,
  });

// Swap the raw `activeView` for a view with effective config so the table and row query see drafts.
const effectiveView = useMemo(
  () =>
    activeView
      ? { ...activeView, config: { ...activeView.config, filter: effectiveFilter, sorts: effectiveSorts } }
      : undefined,
  [activeView, effectiveFilter, effectiveSorts],
);

// Row query reads effective filter/sorts.
const { data: rowsData, ... } = useBaseRowsQuery(
  base ? baseId : undefined,
  effectiveFilter,
  effectiveSorts,
);

// Table is seeded from effectiveView for rendering, but the auto-persist
// write-path uses the real `activeView.config` as the baseline so draft
// filter/sort values can never leak into a column-layout save.
// See "Filter & sort write-path changes" below for the exact mechanism.
const { table, persistViewConfig } = useBaseTable(base, rows, effectiveView, {
  baselineConfig: activeView?.config,
});
```

The server-roundtrip `persistViewConfig` keeps being called for column layout changes. It reads from `baselineConfig` — never from the effective/draft state — so a pending layout write cannot bake draft filter/sort values into the server baseline. See the next subsection for the exact implementation.

### Filter & sort write-path changes

Today, filter/sort editors feed `BaseToolbar`'s handlers:

- [base-toolbar.tsx:135-148](../../../apps/client/src/features/base/components/base-toolbar.tsx) `handleSortsChange` → builds config via `buildViewConfigFromTable(table, activeView.config, { sorts: newSorts })` → `updateViewMutation.mutate(...)`.
- [base-toolbar.tsx:150-169](../../../apps/client/src/features/base/components/base-toolbar.tsx) `handleFiltersChange` → same pattern with `{ filter }`.

Both write directly to the server. That's the exact site to branch.

**New `base-toolbar.tsx`:** accept two new callbacks from `base-table.tsx`:

```ts
onDraftSortsChange: (sorts: ViewSortConfig[]) => void;
onDraftFiltersChange: (filter: FilterGroup | undefined) => void;
```

The toolbar drops its internal `updateViewMutation.mutate` calls for sort/filter (retains them for view tabs / view type flip if any exists elsewhere). `handleSortsChange` becomes:

```ts
const handleSortsChange = useCallback(
  (newSorts: ViewSortConfig[]) => {
    onDraftSortsChange(newSorts); // writes to useViewDraft via base-table
  },
  [onDraftSortsChange],
);
```

Same for filters — the FilterCondition[]→FilterGroup wrapping logic at [base-toolbar.tsx:152-157](../../../apps/client/src/features/base/components/base-toolbar.tsx) stays; only the final dispatch target changes.

**`base-table.tsx`** wires those callbacks to the draft hook:

```ts
const handleDraftSortsChange = useCallback(
  (sorts: ViewSortConfig[]) => setSorts(sorts.length ? sorts : undefined),
  [setSorts],
);
const handleDraftFiltersChange = useCallback(
  (filter: FilterGroup | undefined) => setFilter(filter),
  [setFilter],
);
```

The "normalize empty to undefined" rule is how we let the draft go clean after the user deletes every filter — the draft hook's "remove key if both axes are undefined" rule then kicks in.

**Toolbar badge counts:** [base-toolbar.tsx:118-128](../../../apps/client/src/features/base/components/base-toolbar.tsx) currently derives `sorts` and `conditions` from `activeView.config`. Switch these to read from the **effective** config (`effectiveView.config`) so the toolbar badges reflect the draft's count, not the baseline. The toolbar already accepts `activeView` — pass it `effectiveView` instead, since everything the toolbar reads from `activeView` (name, sorts, filter) should be in the effective form.
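
For illustration, a sketch of what those badge derivations could look like once the toolbar receives the effective view; the `conditions` field on `FilterGroup` is an assumption based on the wrapping logic mentioned above:

```ts
// Sketch — badge counts read from the effective (draft-or-baseline) config.
const sortCount = effectiveView?.config?.sorts?.length ?? 0;
const filterCount = effectiveView?.config?.filter?.conditions?.length ?? 0;
```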

**The `buildViewConfigFromTable` call site in `handleColumnReorder` / `handleResizeEnd` / field-visibility:** these continue reading from `activeView.config` (the real baseline) and going through `updateViewMutation`. They do **not** read from the draft. This is deliberate — column layout stays auto-persisted.

However: `buildViewConfigFromTable` currently spreads its `base` argument and emits `sorts` from the live table state. For the debounced `persistViewConfig` call at [use-base-table.ts:382](../../../apps/client/src/features/base/hooks/use-base-table.ts), the `base` arg is the effective config (because we pass `effectiveView` into `useBaseTable`), but the emitted `sorts` comes from the table's live state — which was seeded from effective. That means if the user drafts a sort and then reorders a column, the debounced persist would write `{ ...effectiveConfig, sorts: draftSorts }` back to the server. **Bug.**

Fix: when building the config for the auto-persist path in `persistViewConfig`, override the emitted `sorts` and `filter` with the **baseline** values, not the effective ones. Concretely, change [use-base-table.ts:382](../../../apps/client/src/features/base/hooks/use-base-table.ts) to

```ts
const config = buildViewConfigFromTable(table, activeView.config, {
  sorts: activeView.config?.sorts,
  filter: activeView.config?.filter,
});
```

where `activeView` in that callsite is the **real** activeView (not the effective one). So `useBaseTable` needs both: the effective view for seeding and rendering, and the real baseline for the persist path.

Simplest refactor: give `useBaseTable` an optional `baselineConfig?: ViewConfig` argument. If omitted (existing callers), behave as today. If provided, `persistViewConfig` uses `baselineConfig` for sort/filter overrides. `base-table.tsx` passes `activeView.config` as the baseline and the effective-wrapped view as the active.
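
A sketch of the persist-path override under that refactor; the surrounding variable names (`table`, `updateViewMutation`, the `opts` parameter) stand in for whatever the real hook already has in scope:

```ts
// Sketch — inside useBaseTable(base, rows, activeView, opts?: { baselineConfig?: ViewConfig }).
const persistViewConfig = () => {
  if (!activeView) return;
  // Layout fields come from the live table; sort/filter always come from the
  // baseline so a local draft can never leak into this auto-persist write.
  const baseline = opts?.baselineConfig ?? activeView.config;
  const config = buildViewConfigFromTable(table, baseline, {
    sorts: baseline?.sorts,
    filter: baseline?.filter,
  });
  updateViewMutation.mutate({ viewId: activeView.id, baseId: base.id, config });
};
```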

This keeps `useBaseTable`'s own responsibilities tidy and makes the "drafts don't leak into the layout write-path" rule explicit.

**Note on `useBaseTable`'s re-seed effect:** A draft edit changes `effectiveView.config.filter/sorts`, which propagates through the `derivedColumnOrder` / `derivedColumnVisibility` memos and re-fires the sync effect at [use-base-table.ts:280](../../../apps/client/src/features/base/hooks/use-base-table.ts). This is harmless because (a) `activeView.id` is unchanged, so the full re-seed branch doesn't trigger, and (b) the `hasPendingEdit` branch preserves live column state when no layout mutation is pending, and adopts derived values otherwise — those derived values are still driven by the same `properties`, so they're content-equal. No action required, but worth naming so the implementer doesn't chase a non-issue.

## Banner component

**File:** `apps/client/src/features/base/components/base-view-draft-banner.tsx`

```ts
type BaseViewDraftBannerProps = {
  isDirty: boolean;
  canSave: boolean;
  onReset: () => void;
  onSave: () => void;
  saving: boolean;
};

export function BaseViewDraftBanner({ isDirty, canSave, onReset, onSave, saving }: BaseViewDraftBannerProps) {
  const { t } = useTranslation();
  if (!isDirty) return null;
  return (
    <Paper withBorder radius="sm" px="md" py="xs" /* soft bg per theme */>
      <Group justify="space-between" wrap="nowrap">
        <Group gap="xs" wrap="nowrap">
          <IconInfoCircle size={16} />
          <Text size="sm">{t("Filter and sort changes are visible only to you.")}</Text>
        </Group>
        <Group gap="sm" wrap="nowrap">
          <Button variant="subtle" color="gray" size="xs" onClick={onReset}>{t("Reset")}</Button>
          {canSave && (
            <Button size="xs" onClick={onSave} loading={saving}>{t("Save for everyone")}</Button>
          )}
        </Group>
      </Group>
    </Paper>
  );
}
```

Wiring in [base-table.tsx](../../../apps/client/src/features/base/components/base-table.tsx), inserted between the existing page chrome and `<BaseToolbar />`:

```ts
const { data: space } = useSpaceQuery(base?.spaceId ?? "");
const spaceAbility = useSpaceAbility(space?.membership?.permissions);
const canSave = spaceAbility.can(SpaceCaslAction.Edit, SpaceCaslSubject.Base);
const updateViewMutation = useUpdateViewMutation();
const handleSaveDraft = useCallback(async () => {
  if (!activeView || !base) return;
  const config = buildPromotedConfig(activeView.config);
  await updateViewMutation.mutateAsync({ viewId: activeView.id, baseId: base.id, config });
  reset();
  notifications.show({ message: t("View updated for everyone") });
}, [activeView, base, buildPromotedConfig, reset, updateViewMutation, t]);

return (
  <div style={{...}}>
    <BaseViewDraftBanner
      isDirty={isDirty}
      canSave={canSave}
      onReset={reset}
      onSave={handleSaveDraft}
      saving={updateViewMutation.isPending}
    />
    <BaseToolbar ... />
    <GridContainer ... />
  </div>
);
```

The `useSpaceQuery`/`useSpaceAbility` pair follows the same pattern as [use-history-restore.tsx:35-41](../../../apps/client/src/features/page-history/hooks/use-history-restore.tsx).

## Cross-tab sync

Inherited from `atomWithStorage`. Its internal subscription to the `storage` event re-notifies any Jotai-connected component on other tabs when the matching localStorage key changes, triggering a re-render with the new draft value. No hand-rolled listener in `useViewDraft`.

React Query's row cache is keyed by `(baseId, filter, sorts, search)` — when the updated draft flows through `effectiveFilter` / `effectiveSorts` on the other tab, the row query refetches as a fresh infinite query via the normal path.

Edge case: two tabs editing simultaneously — both writes land in localStorage, last-write-wins (same-user scope, acceptable).

## Save flow (pseudocode)

```ts
async function onSaveForEveryone() {
  if (!activeView || !base) return;
  // 1. Compose the promoted config from the server baseline + draft values.
  //    baseline is activeView.config (NOT effectiveView.config) because the
  //    baseline might include layout fields (propertyWidths, propertyOrder,
  //    hiddenPropertyIds, visiblePropertyIds) that we must preserve verbatim.
  const config: ViewConfig = {
    ...activeView.config,
    filter: draft.filter ?? activeView.config.filter,
    sorts: draft.sorts ?? activeView.config.sorts,
  };
  // 2. Fire the existing mutation. `updateViewMutation` already:
  //    - optimistically updates the ["bases", baseId] query cache
  //    - rolls back on error
  //    - writes the server response back on success
  await updateViewMutation.mutateAsync({ viewId: activeView.id, baseId: base.id, config });
  // 3. Clear the draft. Because the baseline has now caught up to what the
  //    draft said, isDirty flips to false and the banner unmounts.
  reset();
  notifications.show({ message: t("View updated for everyone") });
}
```

Error handling: `useUpdateViewMutation` already shows a red toast and rolls back the optimistic cache update on failure. We do *not* call `reset()` in that case — the draft stays, the banner stays, the user can retry.

## Dirty check

`isDirty` lives inside `useViewDraft`. Returns `true` iff a draft entry exists AND at least one of these is true:

- `draft.filter !== undefined` AND `!deepEqualFilter(draft.filter, baselineFilter)`
- `draft.sorts !== undefined` AND `!deepEqualSorts(draft.sorts, baselineSorts)`

**Deep equality:** the codebase has no `lodash` or `fast-deep-equal` in [client package.json](../../../apps/client/package.json). Options:

1. **`JSON.stringify` both sides and compare strings.** Trivially correct for `FilterGroup` (a pure data tree) and `ViewSortConfig[]`. Key ordering inside objects is deterministic in V8+ for non-numeric keys, which is the case here. Pick this — it's 4 lines and good enough for this shape.
2. Hand-written structural compare — overkill for two types with known finite shapes.

Go with option 1. Helpers live in `use-view-draft.ts`:

```ts
function filterEq(a: FilterGroup | undefined, b: FilterGroup | undefined) {
  return JSON.stringify(a ?? null) === JSON.stringify(b ?? null);
}
function sortsEq(a: ViewSortConfig[] | undefined, b: ViewSortConfig[] | undefined) {
  return JSON.stringify(a ?? null) === JSON.stringify(b ?? null);
}
```

**Orphan suppression.** The agreed rule: when the draft's values equal the baseline, the banner hides. The dirty check above already does that — a draft with `filter: X` where baseline is also `X` yields `filterEq === true` for that axis, and if the sorts axis is also equal (or absent), `isDirty === false`. The key stays in localStorage (no eager GC), but the banner is invisible until the user next diverges or another tab updates the baseline.

## Testing

Per [CLAUDE.md](../../../CLAUDE.md), the client has no test infrastructure (no `vitest` in the workspace). This spec does not block on adding one. Testing is primarily manual QA + optional unit tests if Vitest is introduced alongside this feature.

### Unit tests (proposed, Vitest — gated on harness being added)

`use-view-draft.test.ts`:

- **Initialize with no stored value.** Hook returns `draft=null`, `isDirty=false`, effective values fall through to baseline.
- **`setFilter` writes to localStorage and updates state.** After `setFilter(X)`, `localStorage.getItem(key)` parses back to `{ filter: X, updatedAt: ... }`, `draft.filter === X`, `isDirty === true`.
- **`setSorts` writes independently.** `draft.filter` stays undefined even after `setSorts(...)`, and vice versa.
- **`setFilter(undefined)` then `setSorts(undefined)` removes the key.** After both axes are cleared, `localStorage.getItem(key)` is null.
- **`reset` clears both state and storage.**
- **Draft values equal to baseline → `isDirty === false` without clearing storage.** Set baseline to `B`, set draft filter to `B`, assert `isDirty === false` and `localStorage.getItem(key)` is still non-null (no eager GC).
- **Baseline change while draft exists.** Baseline shifts from `B1` to `B2`, draft filter is `X`. Effective filter stays `X`, `isDirty` stays `true`. Then baseline shifts again to `X` — `isDirty` flips to `false` without draft being cleared.
- **Cross-tab propagation (integration-level, not strictly a unit test).** `atomWithStorage` handles the `storage` event internally; the only thing our hook contributes is the derivation of `effectiveFilter` / `effectiveSorts` / `isDirty` from the atom value. A single assertion that writing to the atom value in one `Provider` context reflects in another suffices.
- **Malformed storage value.** Seed localStorage with garbage under the computed key → `atomWithStorage` yields `null`, hook reports `draft=null`, `isDirty=false`, table receives baseline.
- **`userId` missing → passthrough.** All setters are no-ops, `isDirty=false`, effective = baseline.

### Manual QA checklist

**Single user, single tab.**
- Apply a filter. Banner appears. Row list updates locally.
- Click Reset. Banner disappears. Filter in the popover reverts to baseline. Row list reverts.
- Apply a filter and a sort. Click Save for everyone. Banner disappears. Refresh the page — the filter/sort is now the new baseline (i.e. came back from the server).
- Apply a filter, then manually delete it via the filter popover. Banner disappears. Subsequent refresh does not restore the deleted filter (baseline untouched).

**Single user, multiple tabs.**
- Open base in tab A and tab B. In tab A, add a sort. Tab B re-renders with the same sort applied (verified by checking the sort popover badge and the row order). Tab B shows the banner.
- In tab B, click Reset. Tab A's banner disappears and sort reverts.

**Multi-user baseline race.**
- User X (editor) opens base. Applies a filter (draft). User Y (editor) in another session saves a brand-new baseline via their own Save flow. User X's client receives the websocket `base:schema:bumped` → `["bases", baseId]` invalidates → `activeView.config` updates. User X's `effectiveFilter` still shows X's draft filter (draft wins). Banner stays. No UI prompt. If X now clicks Reset, they see Y's new baseline.

**Permission gating.**
- As a space Viewer (who has Read but not Edit on `Base`): open base, apply a filter. Banner appears but shows only "Reset" — no "Save for everyone" button.
- Server check: attempting Save as a viewer would have been blocked by [base-view.controller.ts:68](../../../apps/server/src/core/base/controllers/base-view.controller.ts) anyway; the UI gate is belt-and-suspenders.

**Reset with popover open.**
- Open the filter popover and add conditions. Without closing the popover, click Reset (the banner is visible behind the popover dropdown — it's positioned above). Popover closes on outside-click, baseline conditions show next open.

**Save clears draft + updates server.**
- Save. Banner vanishes. localStorage key for `{user,base,view}` is absent. Re-open the base in an incognito/second-account browser — the filter/sort shows too (from the server).

**Browser storage cleared.**
- In DevTools, wipe `localStorage`. Base re-renders with baseline. Banner gone. Expected.

## Rollout

- **No DB migration.** No server change.
- **No feature flag.** Behavior change ships as-is.
- **No data migration.** Existing users have no drafts; the system starts empty.
- **Behavioral change vs. today.** Existing users' muscle memory is "touch a filter → auto-saves for everyone". After this ships, that becomes "touch a filter → only I see it until I hit Save for everyone". This is the entire point of the feature but will surprise power users on day one.
  - Mitigation: none in v1. A one-time popover/tooltip pointing at the banner ("New: filter and sort changes are now a draft until you save") is worth doing, but falls squarely in YAGNI territory for the first ship.
- **Followup:** consider a dismissible one-time in-product hint the first time a user diverges from baseline after the deploy. Flag this as a follow-up task; do not ship with v1.

## Risks & open questions

- **localStorage quota.** `FilterGroup` + `ViewSortConfig[]` is tiny — a realistic draft is under 2KB. A worst-case malicious user with thousands of views could hit the 5–10MB per-origin cap, but practically negligible. No cleanup logic needed.
- **Users losing drafts via browser data clear.** Expected. The banner is a live indicator, not a durable source of truth. Flagged in non-goals.
- **Multi-device divergence.** Same user on laptop and phone: drafts don't sync. Expected and flagged.
- **Dropdown caret ("Save as new view") in the screenshot.** Explicitly out of scope for v1. If we add it, the caret menu would include:
  1. "Save for everyone" (current behavior)
  2. "Save as new view" (creates a new `IBaseView` with draft values baked into `config`)
- **Baseline layout fields overriding draft.** Save flow does `{ ...activeView.config, filter: X, sorts: Y }`. If another user changed column widths right before Save, those widths land in the Save's payload (we already read the latest optimistic cache). Acceptable — the alternative (send a sparse patch with only `{filter, sorts}`) would require a server-side partial-update endpoint we don't have.
- **Invalid draft for stale schema.** If a property is deleted while a user's draft references it by id, the predicate/sort engine on the server silently drops unknown property ids. Client-side, the sort/filter popover shows the condition with a missing-property label (existing behavior — the toolbar already does `properties.find((p) => p.id === …)` and tolerates the `undefined` case). No special handling needed here; the draft just falls away when the user next edits and doesn't re-add the dead condition.
- **`SpaceCaslSubject.Base` missing from client enum.** Single-line fix at [permissions.type.ts:12](../../../apps/client/src/features/space/permissions/permissions.type.ts). Flagged so reviewers notice.

## Future extension

1. **Draft column layout.** Extend the draft shape to carry `propertyWidths`, `propertyOrder`, `hiddenPropertyIds`, `visiblePropertyIds`. Column reorder / hide / resize call the draft hook instead of `persistViewConfig`. `useBaseTable` then seeds column state from effective values. Mechanically identical to filter/sort — the hook already takes arbitrary ViewConfig fragments. The only reason this isn't in v1 is to minimize behavioral change surface and keep the spec scope narrow.
2. **Server-side per-user drafts.** For cross-device sync, add a `base_view_drafts` table keyed by `(userId, viewId)` storing the same shape. The client hook swaps localStorage for a paired mutation + query. The banner UX stays identical.
3. **Split-button save.** Dropdown caret next to "Save for everyone" offering "Save as new view" — creates an `IBaseView` via `createView` with the effective config. Deepens the Notion parallel.
4. **Draft conflict hint.** When baseline changes while I have drafts, show a subtle "Baseline has changed since your last edit" line inside the banner with a "Discard draft and load latest" affordance. Expected to be low value in practice — flag once real users report it.

@@ -1,22 +0,0 @@
import { atomFamily, atomWithStorage } from "jotai/utils";
import { BaseViewDraft } from "@/features/base/types/base.types";

export type ViewDraftKey = {
  userId: string;
  baseId: string;
  viewId: string;
};

export const viewDraftStorageKey = (k: ViewDraftKey) =>
  `docmost:base-view-draft:v1:${k.userId}:${k.baseId}:${k.viewId}`;

// `atomWithStorage` handles JSON serialization, cross-tab sync via the
// `storage` event, and lazy first-read out of the box. `atomFamily`'s
// comparator ensures the same triple resolves to the same atom instance
// across renders, so identity-equality cache hits in Jotai still work.
export const viewDraftAtomFamily = atomFamily(
  (k: ViewDraftKey) =>
    atomWithStorage<BaseViewDraft | null>(viewDraftStorageKey(k), null),
  (a, b) =>
    a.userId === b.userId && a.baseId === b.baseId && a.viewId === b.viewId,
);
@@ -1,6 +1,5 @@
import { useCallback, useEffect, useMemo } from "react";
import { Text, Stack } from "@mantine/core";
import { notifications } from "@mantine/notifications";
import { useAtom } from "jotai";
import { IconDatabase } from "@tabler/icons-react";
import { useTranslation } from "react-i18next";
@@ -8,10 +7,6 @@ import { arrayMove } from "@dnd-kit/sortable";
import { generateJitteredKeyBetween } from "fractional-indexing-jittered";
import { useBaseQuery } from "@/features/base/queries/base-query";
import { useBaseSocket } from "@/features/base/hooks/use-base-socket";
import {
  FilterGroup,
  ViewSortConfig,
} from "@/features/base/types/base.types";
import {
  useBaseRowsQuery,
  flattenRows,
@@ -19,24 +14,12 @@ import {
import { useUpdateRowMutation } from "@/features/base/queries/base-row-query";
import { useCreateRowMutation } from "@/features/base/queries/base-row-query";
import { useReorderRowMutation } from "@/features/base/queries/base-row-query";
import {
  useCreateViewMutation,
  useUpdateViewMutation,
} from "@/features/base/queries/base-view-query";
import { useCreateViewMutation } from "@/features/base/queries/base-view-query";
import { activeViewIdAtom } from "@/features/base/atoms/base-atoms";
import { useBaseTable } from "@/features/base/hooks/use-base-table";
import { useRowSelection } from "@/features/base/hooks/use-row-selection";
import useCurrentUser from "@/features/user/hooks/use-current-user";
import { useViewDraft } from "@/features/base/hooks/use-view-draft";
import { useSpaceQuery } from "@/features/space/queries/space-query";
import { useSpaceAbility } from "@/features/space/permissions/use-space-ability";
import {
  SpaceCaslAction,
  SpaceCaslSubject,
} from "@/features/space/permissions/permissions.type";
import { GridContainer } from "@/features/base/components/grid/grid-container";
import { BaseToolbar } from "@/features/base/components/base-toolbar";
import { BaseViewDraftBanner } from "@/features/base/components/base-view-draft-banner";
import { BaseTableSkeleton } from "@/features/base/components/base-table-skeleton";
import classes from "@/features/base/styles/grid.module.css";

@@ -59,59 +42,8 @@ export function BaseTable({ baseId }: BaseTableProps) {
    return views.find((v) => v.id === activeViewId) ?? views[0];
  }, [views, activeViewId]);

  const { data: currentUser } = useCurrentUser();
  const {
    draft: _draft,
    effectiveFilter,
    effectiveSorts,
    isDirty,
    setFilter: setDraftFilter,
    setSorts: setDraftSorts,
    reset: resetDraft,
    buildPromotedConfig,
  } = useViewDraft({
    userId: currentUser?.user.id,
    baseId,
    viewId: activeView?.id,
    baselineFilter: activeView?.config?.filter,
    baselineSorts: activeView?.config?.sorts,
  });

  // Render view: baseline merged with any local draft. Passed to
  // `useBaseTable` (for table state seeding) and to the toolbar (for badge
  // counts). The real `activeView` is still used as the auto-persist
  // baseline so drafts can't leak into column-layout writes.
  const effectiveView = useMemo(
    () =>
      activeView
        ? {
            ...activeView,
            config: {
              ...activeView.config,
              filter: effectiveFilter,
              sorts: effectiveSorts,
            },
          }
        : undefined,
    [activeView, effectiveFilter, effectiveSorts],
  );

  // Effective values drive the row query and the client-side position
  // sort guard below. The old `activeView.config` reads are no longer the
  // source of truth once drafts are involved.
  const activeFilter = effectiveFilter;
  const activeSorts = effectiveSorts;

  // `useSpaceQuery` is guarded by `enabled: !!spaceId` internally, so
  // passing `""` when `base` hasn't loaded yet is safe. See
  // use-history-restore.tsx for the same pattern.
  const { data: space } = useSpaceQuery(base?.spaceId ?? "");
  const spaceAbility = useSpaceAbility(space?.membership?.permissions);
  const canSave = spaceAbility.can(
    SpaceCaslAction.Edit,
    SpaceCaslSubject.Base,
  );

  const activeFilter = activeView?.config?.filter;
  const activeSorts = activeView?.config?.sorts;
  // Hold the rows query until `base` has loaded. Otherwise the query
  // fires once with `activeFilter` / `activeSorts` still undefined
  // (a "bland" list request), then fires a second time as soon as the
@@ -124,7 +56,6 @@ export function BaseTable({ baseId }: BaseTableProps) {
  const createRowMutation = useCreateRowMutation();
|
||||
const reorderRowMutation = useReorderRowMutation();
|
||||
const createViewMutation = useCreateViewMutation();
|
||||
const updateViewMutation = useUpdateViewMutation();
|
||||
|
||||
useEffect(() => {
|
||||
if (activeView && activeViewId !== activeView.id) {
|
||||
@@ -154,9 +85,7 @@ export function BaseTable({ baseId }: BaseTableProps) {
|
||||
);
|
||||
}, [rowsData, activeSorts]);
|
||||
|
||||
const { table, persistViewConfig } = useBaseTable(base, rows, effectiveView, {
|
||||
baselineConfig: activeView?.config,
|
||||
});
|
||||
const { table, persistViewConfig } = useBaseTable(base, rows, activeView);
|
||||
|
||||
const handleCellUpdate = useCallback(
|
||||
(rowId: string, propertyId: string, value: unknown) => {
|
||||
@@ -206,48 +135,6 @@ export function BaseTable({ baseId }: BaseTableProps) {
|
||||
persistViewConfig();
|
||||
}, [persistViewConfig]);
|
||||
|
||||
const handleDraftSortsChange = useCallback(
|
||||
(sorts: ViewSortConfig[] | undefined) => {
|
||||
setDraftSorts(sorts && sorts.length > 0 ? sorts : undefined);
|
||||
},
|
||||
[setDraftSorts],
|
||||
);
|
||||
|
||||
const handleDraftFiltersChange = useCallback(
|
||||
(filter: FilterGroup | undefined) => {
|
||||
setDraftFilter(filter);
|
||||
},
|
||||
[setDraftFilter],
|
||||
);
|
||||
|
||||
const handleSaveDraft = useCallback(async () => {
|
||||
if (!activeView || !base) return;
|
||||
// `buildPromotedConfig` preserves all non-draft baseline fields
|
||||
// (widths/order/visibility) and only overwrites filter/sorts when the
|
||||
// draft has divergent values.
|
||||
const config = buildPromotedConfig(activeView.config);
|
||||
try {
|
||||
await updateViewMutation.mutateAsync({
|
||||
viewId: activeView.id,
|
||||
baseId: base.id,
|
||||
config,
|
||||
});
|
||||
resetDraft();
|
||||
notifications.show({ message: t("View updated for everyone") });
|
||||
} catch {
|
||||
// `useUpdateViewMutation` already shows a red toast on error and
|
||||
// rolls back the optimistic cache; keep the draft so the user can
|
||||
// retry without re-typing.
|
||||
}
|
||||
}, [
|
||||
activeView,
|
||||
base,
|
||||
buildPromotedConfig,
|
||||
resetDraft,
|
||||
t,
|
||||
updateViewMutation,
|
||||
]);
|
||||
|
||||
const handleRowReorder = useCallback(
|
||||
(rowId: string, targetRowId: string, dropPosition: "above" | "below") => {
|
||||
const remainingRows = rows.filter((r) => r.id !== rowId);
|
||||
@@ -302,23 +189,14 @@ export function BaseTable({ baseId }: BaseTableProps) {
|
||||
|
||||
return (
|
||||
<div style={{ display: "flex", flexDirection: "column", height: "100%" }}>
|
||||
<BaseViewDraftBanner
|
||||
isDirty={isDirty}
|
||||
canSave={canSave}
|
||||
onReset={resetDraft}
|
||||
onSave={handleSaveDraft}
|
||||
saving={updateViewMutation.isPending}
|
||||
/>
|
||||
<BaseToolbar
|
||||
base={base}
|
||||
activeView={effectiveView}
|
||||
activeView={activeView}
|
||||
views={views}
|
||||
table={table}
|
||||
onViewChange={handleViewChange}
|
||||
onAddView={handleAddView}
|
||||
onPersistViewConfig={persistViewConfig}
|
||||
onDraftSortsChange={handleDraftSortsChange}
|
||||
onDraftFiltersChange={handleDraftFiltersChange}
|
||||
/>
|
||||
<GridContainer
|
||||
table={table}
|
||||
|
||||
@@ -16,6 +16,8 @@ import {
|
||||
FilterCondition,
|
||||
FilterGroup,
|
||||
} from "@/features/base/types/base.types";
|
||||
import { useUpdateViewMutation } from "@/features/base/queries/base-view-query";
|
||||
import { buildViewConfigFromTable } from "@/features/base/hooks/use-base-table";
|
||||
import { exportBaseToCsv } from "@/features/base/services/base-service";
|
||||
import { ViewTabs } from "@/features/base/components/views/view-tabs";
|
||||
import { ViewSortConfigPopover } from "@/features/base/components/views/view-sort-config";
|
||||
@@ -26,18 +28,12 @@ import classes from "@/features/base/styles/grid.module.css";
|
||||
|
||||
type BaseToolbarProps = {
|
||||
base: IBase;
|
||||
// Effective view — baseline merged with any local draft. Badge counts
|
||||
// and sort/filter popover seed data read from this. The real baseline
|
||||
// only enters via `onDraftSortsChange` / `onDraftFiltersChange`
|
||||
// callbacks defined by the parent.
|
||||
activeView: IBaseView | undefined;
|
||||
views: IBaseView[];
|
||||
table: Table<IBaseRow>;
|
||||
onViewChange: (viewId: string) => void;
|
||||
onAddView?: () => void;
|
||||
onPersistViewConfig: () => void;
|
||||
onDraftSortsChange: (sorts: ViewSortConfig[] | undefined) => void;
|
||||
onDraftFiltersChange: (filter: FilterGroup | undefined) => void;
|
||||
};
|
||||
|
||||
export function BaseToolbar({
|
||||
@@ -48,8 +44,6 @@ export function BaseToolbar({
|
||||
onViewChange,
|
||||
onAddView,
|
||||
onPersistViewConfig,
|
||||
onDraftSortsChange,
|
||||
onDraftFiltersChange,
|
||||
}: BaseToolbarProps) {
|
||||
const { t } = useTranslation();
|
||||
const [sortOpened, setSortOpened] = useState(false);
|
||||
@@ -119,6 +113,8 @@ export function BaseToolbar({
|
||||
setFieldsOpened(panel === "fields" ? (v) => !v : false);
|
||||
}, []);
|
||||
|
||||
const updateViewMutation = useUpdateViewMutation();
|
||||
|
||||
const sorts = activeView?.config?.sorts ?? [];
|
||||
// Stored view config uses the engine's filter tree. The popover edits
|
||||
// an AND-only flat list; we unwrap the top-level group's children when
|
||||
@@ -138,24 +134,38 @@ export function BaseToolbar({
|
||||
|
||||
const handleSortsChange = useCallback(
|
||||
(newSorts: ViewSortConfig[]) => {
|
||||
// Normalize empty to undefined so the draft hook can drop the `sorts`
|
||||
// axis (and remove its localStorage entry when both axes go clean).
|
||||
onDraftSortsChange(newSorts.length > 0 ? newSorts : undefined);
|
||||
if (!activeView) return;
|
||||
const config = buildViewConfigFromTable(table, activeView.config, {
|
||||
sorts: newSorts,
|
||||
});
|
||||
updateViewMutation.mutate({
|
||||
viewId: activeView.id,
|
||||
baseId: base.id,
|
||||
config,
|
||||
});
|
||||
},
|
||||
[onDraftSortsChange],
|
||||
[activeView, base.id, table, updateViewMutation],
|
||||
);
|
||||
|
||||
const handleFiltersChange = useCallback(
|
||||
(newConditions: FilterCondition[]) => {
|
||||
// Wrap the AND-flat popover output into the engine's FilterGroup shape.
|
||||
// Pass `undefined` to drop the filter axis from the draft entirely.
|
||||
if (!activeView) return;
|
||||
const filter: FilterGroup | undefined =
|
||||
newConditions.length > 0
|
||||
? { op: "and", children: newConditions }
|
||||
: undefined;
|
||||
onDraftFiltersChange(filter);
|
||||
// `filter: undefined` in overrides removes the filter key; the helper's
|
||||
// spread-then-overrides order means `undefined` wins over any base filter.
|
||||
const config = buildViewConfigFromTable(table, activeView.config, {
|
||||
filter,
|
||||
});
|
||||
updateViewMutation.mutate({
|
||||
viewId: activeView.id,
|
||||
baseId: base.id,
|
||||
config,
|
||||
});
|
||||
},
|
||||
[onDraftFiltersChange],
|
||||
[activeView, base.id, table, updateViewMutation],
|
||||
);
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,45 +0,0 @@
import { Group, Button, Tooltip } from "@mantine/core";
import { useTranslation } from "react-i18next";

type BaseViewDraftBannerProps = {
  isDirty: boolean;
  canSave: boolean;
  onReset: () => void;
  onSave: () => void;
  saving: boolean;
};

export function BaseViewDraftBanner({
  isDirty,
  canSave,
  onReset,
  onSave,
  saving,
}: BaseViewDraftBannerProps) {
  const { t } = useTranslation();
  if (!isDirty) return null;
  return (
    <Group justify="flex-end" gap="xs" px="md" py={6} wrap="nowrap">
      <Button variant="subtle" color="gray" size="xs" onClick={onReset}>
        {t("Reset")}
      </Button>
      {canSave && (
        <Tooltip
          label={t("Filter and sort changes are visible only to you")}
          position="bottom"
          withArrow
        >
          <Button
            variant="light"
            color="orange"
            size="xs"
            onClick={onSave}
            loading={saving}
          >
            {t("Save for everyone")}
          </Button>
        </Tooltip>
      )}
    </Group>
  );
}
@@ -1,268 +0,0 @@
|
||||
import { useState, useRef, useEffect, useCallback, useMemo } from "react";
|
||||
import { Popover, ActionIcon, Text } from "@mantine/core";
|
||||
import { useDebouncedValue } from "@mantine/hooks";
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import { IconX, IconFileDescription } from "@tabler/icons-react";
|
||||
import { Link } from "react-router-dom";
|
||||
import clsx from "clsx";
|
||||
import { IBaseProperty } from "@/features/base/types/base.types";
|
||||
import { useResolvedPages } from "@/features/base/queries/base-page-resolver-query";
|
||||
import { useBaseQuery } from "@/features/base/queries/base-query";
|
||||
import { searchSuggestions } from "@/features/search/services/search-service";
|
||||
import { buildPageUrl } from "@/features/page/page.utils";
|
||||
import { useListKeyboardNav } from "@/features/base/hooks/use-list-keyboard-nav";
|
||||
import cellClasses from "@/features/base/styles/cells.module.css";
|
||||
|
||||
type CellPageProps = {
|
||||
value: unknown;
|
||||
property: IBaseProperty;
|
||||
rowId: string;
|
||||
isEditing: boolean;
|
||||
onCommit: (value: unknown) => void;
|
||||
onCancel: () => void;
|
||||
};
|
||||
|
||||
type PageSuggestion = {
|
||||
id: string;
|
||||
slugId: string;
|
||||
title: string | null;
|
||||
icon: string | null;
|
||||
spaceId: string;
|
||||
space?: { id: string; slug: string; name: string } | null;
|
||||
};
|
||||
|
||||
function parsePageId(value: unknown): string | null {
|
||||
if (typeof value === "string" && value.length > 0) return value;
|
||||
return null;
|
||||
}
|
||||
|
||||
export function CellPage({
|
||||
value,
|
||||
property,
|
||||
isEditing,
|
||||
onCommit,
|
||||
onCancel,
|
||||
}: CellPageProps) {
|
||||
const pageId = parsePageId(value);
|
||||
const { data: base } = useBaseQuery(property.baseId);
|
||||
|
||||
const ids = useMemo(() => (pageId ? [pageId] : []), [pageId]);
|
||||
const { pages } = useResolvedPages(ids);
|
||||
const resolvedPage = pageId ? pages.get(pageId) : undefined;
|
||||
|
||||
if (isEditing) {
|
||||
return (
|
||||
<PagePicker
|
||||
pageId={pageId}
|
||||
resolvedPage={resolvedPage ?? null}
|
||||
spaceId={base?.spaceId}
|
||||
onCommit={onCommit}
|
||||
onCancel={onCancel}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
if (!pageId) {
|
||||
return <span className={cellClasses.emptyValue} />;
|
||||
}
|
||||
|
||||
if (resolvedPage === undefined) {
|
||||
// Still resolving — render an empty pill-shaped placeholder to avoid
|
||||
// the "Page not found" flicker on initial load.
|
||||
return <span className={cellClasses.emptyValue} />;
|
||||
}
|
||||
|
||||
if (resolvedPage === null) {
|
||||
return (
|
||||
<span className={cellClasses.pageMissing}>
|
||||
<IconFileDescription size={14} />
|
||||
<span>Page not found</span>
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
return <PagePill page={resolvedPage} />;
|
||||
}
|
||||
|
||||
type PillPage = {
|
||||
slugId: string;
|
||||
title: string | null;
|
||||
icon: string | null;
|
||||
space: { slug: string } | null;
|
||||
};
|
||||
|
||||
function PagePill({ page }: { page: PillPage }) {
|
||||
const title = page.title || "Untitled";
|
||||
const spaceSlug = page.space?.slug ?? "";
|
||||
const url = buildPageUrl(spaceSlug, page.slugId, title);
|
||||
|
||||
return (
|
||||
<Link
|
||||
to={url}
|
||||
className={cellClasses.pagePill}
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
onDoubleClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
{page.icon ? (
|
||||
<span className={cellClasses.pagePillIcon}>{page.icon}</span>
|
||||
) : (
|
||||
<IconFileDescription size={14} className={cellClasses.pagePillIconFallback} />
|
||||
)}
|
||||
<span className={cellClasses.pagePillText}>{title}</span>
|
||||
</Link>
|
||||
);
|
||||
}
|
||||
|
||||
type PagePickerProps = {
|
||||
pageId: string | null;
|
||||
resolvedPage: { id: string; slugId: string; title: string | null; icon: string | null; space: { id: string; slug: string; name: string } | null } | null;
|
||||
spaceId?: string;
|
||||
onCommit: (value: unknown) => void;
|
||||
onCancel: () => void;
|
||||
};
|
||||
|
||||
function PagePicker({
|
||||
pageId,
|
||||
resolvedPage,
|
||||
spaceId,
|
||||
onCommit,
|
||||
onCancel,
|
||||
}: PagePickerProps) {
|
||||
const [search, setSearch] = useState("");
|
||||
const [debouncedSearch] = useDebouncedValue(search, 250);
|
||||
const searchRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
useEffect(() => {
|
||||
requestAnimationFrame(() => searchRef.current?.focus());
|
||||
}, []);
|
||||
|
||||
const trimmed = debouncedSearch.trim();
|
||||
const { data: suggestions = [] } = useQuery({
|
||||
queryKey: ["bases", "pages", "search", trimmed, spaceId ?? ""],
|
||||
queryFn: async () => {
|
||||
const res = await searchSuggestions({
|
||||
query: trimmed,
|
||||
includePages: true,
|
||||
spaceId,
|
||||
limit: trimmed ? 25 : 5,
|
||||
});
|
||||
return (res.pages ?? []) as PageSuggestion[];
|
||||
},
|
||||
staleTime: 15_000,
|
||||
});
|
||||
|
||||
const { activeIndex, setActiveIndex, handleNavKey, setOptionRef } =
|
||||
useListKeyboardNav(suggestions.length, [debouncedSearch]);
|
||||
|
||||
const handleSelect = useCallback(
|
||||
(id: string) => {
|
||||
onCommit(id === pageId ? null : id);
|
||||
},
|
||||
[pageId, onCommit],
|
||||
);
|
||||
|
||||
const handleRemove = useCallback(() => {
|
||||
onCommit(null);
|
||||
}, [onCommit]);
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(e: React.KeyboardEvent) => {
|
||||
if (e.key === "Escape") {
|
||||
e.preventDefault();
|
||||
onCancel();
|
||||
return;
|
||||
}
|
||||
if (handleNavKey(e)) return;
|
||||
if (e.key === "Enter") {
|
||||
if (activeIndex < 0 || activeIndex >= suggestions.length) return;
|
||||
e.preventDefault();
|
||||
handleSelect(suggestions[activeIndex].id);
|
||||
}
|
||||
},
|
||||
[onCancel, handleNavKey, activeIndex, suggestions, handleSelect],
|
||||
);
|
||||
|
||||
return (
|
||||
<Popover opened onClose={onCancel} position="bottom-start" width={320} trapFocus>
|
||||
<Popover.Target>
|
||||
<div style={{ width: "100%", height: "100%" }}>
|
||||
{resolvedPage ? <PagePill page={resolvedPage} /> : <span className={cellClasses.emptyValue} />}
|
||||
</div>
|
||||
</Popover.Target>
|
||||
<Popover.Dropdown p={0}>
|
||||
<div className={cellClasses.personTagArea}>
|
||||
{pageId && resolvedPage && (
|
||||
<span className={cellClasses.personTag}>
|
||||
{resolvedPage.icon ? (
|
||||
<span>{resolvedPage.icon}</span>
|
||||
) : (
|
||||
<IconFileDescription size={14} />
|
||||
)}
|
||||
<span className={cellClasses.personTagName}>
|
||||
{resolvedPage.title || "Untitled"}
|
||||
</span>
|
||||
<button
|
||||
type="button"
|
||||
className={cellClasses.personTagRemove}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
handleRemove();
|
||||
}}
|
||||
>
|
||||
<IconX size={10} />
|
||||
</button>
|
||||
</span>
|
||||
)}
|
||||
<input
|
||||
ref={searchRef}
|
||||
className={cellClasses.personTagInput}
|
||||
placeholder={pageId ? "" : "Search for a page..."}
|
||||
value={search}
|
||||
onChange={(e) => setSearch(e.currentTarget.value)}
|
||||
onKeyDown={handleKeyDown}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className={cellClasses.personDropdownDivider} />
|
||||
<div className={cellClasses.selectDropdown}>
|
||||
{suggestions.length === 0 && (
|
||||
<div className={cellClasses.personDropdownHint}>
|
||||
{trimmed ? "No pages found" : "No pages yet"}
|
||||
</div>
|
||||
)}
|
||||
{suggestions.map((page, idx) => {
|
||||
const isSelected = page.id === pageId;
|
||||
return (
|
||||
<div
|
||||
key={page.id}
|
||||
ref={setOptionRef(idx)}
|
||||
className={clsx(
|
||||
cellClasses.selectOption,
|
||||
isSelected && cellClasses.selectOptionActive,
|
||||
idx === activeIndex && cellClasses.selectOptionKeyboardActive,
|
||||
)}
|
||||
onMouseEnter={() => setActiveIndex(idx)}
|
||||
onMouseDown={(e) => e.preventDefault()}
|
||||
onClick={() => handleSelect(page.id)}
|
||||
>
|
||||
{page.icon ? (
|
||||
<span>{page.icon}</span>
|
||||
) : (
|
||||
<IconFileDescription size={14} />
|
||||
)}
|
||||
<span className={cellClasses.personOptionName}>
|
||||
{page.title || "Untitled"}
|
||||
</span>
|
||||
{page.space?.name && (
|
||||
<Text size="xs" c="dimmed" ml="auto" truncate>
|
||||
{page.space.name}
|
||||
</Text>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</Popover.Dropdown>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
@@ -15,7 +15,6 @@ import { CellUrl } from "@/features/base/components/cells/cell-url";
|
||||
import { CellEmail } from "@/features/base/components/cells/cell-email";
|
||||
import { CellPerson } from "@/features/base/components/cells/cell-person";
|
||||
import { CellFile } from "@/features/base/components/cells/cell-file";
|
||||
import { CellPage } from "@/features/base/components/cells/cell-page";
|
||||
import { CellCreatedAt } from "@/features/base/components/cells/cell-created-at";
|
||||
import { CellLastEditedAt } from "@/features/base/components/cells/cell-last-edited-at";
|
||||
import { CellLastEditedBy } from "@/features/base/components/cells/cell-last-edited-by";
|
||||
@@ -46,7 +45,6 @@ const cellComponents: Record<
|
||||
email: CellEmail,
|
||||
person: CellPerson,
|
||||
file: CellFile,
|
||||
page: CellPage,
|
||||
createdAt: CellCreatedAt,
|
||||
lastEditedAt: CellLastEditedAt,
|
||||
lastEditedBy: CellLastEditedBy,
|
||||
|
||||
@@ -236,7 +236,6 @@ export function CreatePropertyPopover({ baseId, onPropertyCreated }: CreatePrope
|
||||
<TextInput
|
||||
ref={nameInputRef}
|
||||
size="xs"
|
||||
label={t("Name")}
|
||||
placeholder={selectedTypeLabel}
|
||||
value={name}
|
||||
onChange={(e) => setName(e.currentTarget.value)}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useMemo } from "react";
|
||||
import { useCallback } from "react";
|
||||
import { Stack, NumberInput, Select, Switch, Text } from "@mantine/core";
|
||||
import {
|
||||
IBaseProperty,
|
||||
@@ -88,7 +88,7 @@ function SelectOptions({
|
||||
hideButtons?: boolean;
|
||||
}) {
|
||||
const options = property.typeOptions as SelectTypeOptions | undefined;
|
||||
const choices = useMemo(() => options?.choices ?? [], [options?.choices]);
|
||||
const choices = options?.choices ?? [];
|
||||
|
||||
const handleSave = useCallback(
|
||||
(newChoices: Choice[]) => {
|
||||
@@ -127,7 +127,7 @@ function StatusOptions({
|
||||
hideButtons?: boolean;
|
||||
}) {
|
||||
const options = property.typeOptions as SelectTypeOptions | undefined;
|
||||
const choices = useMemo(() => options?.choices ?? [], [options?.choices]);
|
||||
const choices = options?.choices ?? [];
|
||||
|
||||
const handleSave = useCallback(
|
||||
(newChoices: Choice[]) => {
|
||||
|
||||
@@ -8,7 +8,6 @@ import {
|
||||
IconCalendar,
|
||||
IconUser,
|
||||
IconPaperclip,
|
||||
IconFileDescription,
|
||||
IconCheckbox,
|
||||
IconLink,
|
||||
IconMail,
|
||||
@@ -36,7 +35,6 @@ const propertyTypes: {
|
||||
{ type: "date", icon: IconCalendar, labelKey: "Date" },
|
||||
{ type: "person", icon: IconUser, labelKey: "Person" },
|
||||
{ type: "file", icon: IconPaperclip, labelKey: "File" },
|
||||
{ type: "page", icon: IconFileDescription, labelKey: "Page" },
|
||||
{ type: "checkbox", icon: IconCheckbox, labelKey: "Checkbox" },
|
||||
{ type: "url", icon: IconLink, labelKey: "URL" },
|
||||
{ type: "email", icon: IconMail, labelKey: "Email" },
|
||||
|
||||
@@ -65,8 +65,6 @@ function getOperatorsForType(type: string): FilterOperator[] {
|
||||
return ["eq", "neq", "any", "none", "isEmpty", "isNotEmpty"];
|
||||
case "file":
|
||||
return ["isEmpty", "isNotEmpty"];
|
||||
case "page":
|
||||
return ["isEmpty", "isNotEmpty"];
|
||||
default:
|
||||
return ["eq", "neq", "isEmpty", "isNotEmpty"];
|
||||
}
|
||||
|
||||
@@ -41,11 +41,7 @@ export function ViewSortConfigPopover({
|
||||
if (!opened) setDraft(null);
|
||||
}, [opened]);
|
||||
|
||||
// Page properties store a UUID; sorting by raw UUID is unhelpful and
|
||||
// title-based sort would require a join. Hide until we support it properly.
|
||||
const sortableProperties = properties.filter((p) => p.type !== "page");
|
||||
|
||||
const propertyOptions = sortableProperties.map((p) => ({
|
||||
const propertyOptions = properties.map((p) => ({
|
||||
value: p.id,
|
||||
label: p.name,
|
||||
}));
|
||||
@@ -57,10 +53,10 @@ export function ViewSortConfigPopover({
|
||||
|
||||
const handleStartDraft = useCallback(() => {
|
||||
const usedIds = new Set(sorts.map((s) => s.propertyId));
|
||||
const available = sortableProperties.find((p) => !usedIds.has(p.id));
|
||||
const available = properties.find((p) => !usedIds.has(p.id));
|
||||
if (!available) return;
|
||||
setDraft({ propertyId: available.id, direction: "asc" });
|
||||
}, [sorts, sortableProperties]);
|
||||
}, [sorts, properties]);
|
||||
|
||||
const handleSaveDraft = useCallback(() => {
|
||||
if (!draft) return;
|
||||
@@ -103,8 +99,7 @@ export function ViewSortConfigPopover({
|
||||
[sorts, onChange],
|
||||
);
|
||||
|
||||
const canAddMore =
|
||||
sortableProperties.length > sorts.length + (draft ? 1 : 0);
|
||||
const canAddMore = properties.length > sorts.length + (draft ? 1 : 0);
|
||||
|
||||
return (
|
||||
<Popover
|
||||
|
||||
@@ -221,21 +221,10 @@ export type UseBaseTableResult = {
|
||||
persistViewConfig: () => void;
|
||||
};
|
||||
|
||||
export type UseBaseTableOptions = {
|
||||
// When provided, `persistViewConfig` uses this as the authoritative
|
||||
// filter/sorts for the server write. The table's live sorting state is
|
||||
// ignored for that axis so a locally-drafted sort/filter (kept in
|
||||
// `activeView.config` for rendering purposes) cannot leak into the
|
||||
// auto-persist column-layout path. Optional to preserve existing
|
||||
// callers that pass the real baseline as `activeView`.
|
||||
baselineConfig?: ViewConfig;
|
||||
};
|
||||
|
||||
export function useBaseTable(
|
||||
base: IBase | undefined,
|
||||
rows: IBaseRow[],
|
||||
activeView: IBaseView | undefined,
|
||||
opts: UseBaseTableOptions = {},
|
||||
): UseBaseTableResult {
|
||||
const updateViewMutation = useUpdateViewMutation();
|
||||
const persistTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
|
||||
@@ -390,15 +379,7 @@ export function useBaseTable(
|
||||
|
||||
persistTimerRef.current = setTimeout(() => {
|
||||
persistTimerRef.current = null;
|
||||
// `baseline` is the server-side-of-truth config. When the caller has
|
||||
// wrapped `activeView` with draft filter/sort values for render, they
|
||||
// pass the pre-wrap config here so we never round-trip drafts through
|
||||
// the column-layout auto-save path.
|
||||
const baseline = opts.baselineConfig ?? activeView.config;
|
||||
const config = buildViewConfigFromTable(table, baseline, {
|
||||
sorts: baseline?.sorts,
|
||||
filter: baseline?.filter,
|
||||
});
|
||||
const config = buildViewConfigFromTable(table, activeView.config);
|
||||
updateViewMutation.mutate(
|
||||
{ viewId: activeView.id, baseId: base.id, config },
|
||||
{
|
||||
@@ -412,7 +393,7 @@ export function useBaseTable(
|
||||
},
|
||||
);
|
||||
}, 300);
|
||||
}, [activeView, base, table, updateViewMutation, opts.baselineConfig]);
|
||||
}, [activeView, base, table, updateViewMutation]);
|
||||
|
||||
return { table, persistViewConfig };
|
||||
}
|
||||
|
||||
@@ -1,156 +0,0 @@
|
||||
import { useCallback, useMemo } from "react";
|
||||
import { useAtom } from "jotai";
|
||||
import { RESET } from "jotai/utils";
|
||||
import {
|
||||
BaseViewDraft,
|
||||
FilterGroup,
|
||||
ViewConfig,
|
||||
ViewSortConfig,
|
||||
} from "@/features/base/types/base.types";
|
||||
import { viewDraftAtomFamily } from "@/features/base/atoms/view-draft-atom";
|
||||
|
||||
export type UseViewDraftArgs = {
|
||||
userId: string | undefined;
|
||||
baseId: string | undefined;
|
||||
viewId: string | undefined;
|
||||
baselineFilter: FilterGroup | undefined;
|
||||
baselineSorts: ViewSortConfig[] | undefined;
|
||||
};
|
||||
|
||||
export type ViewDraftState = {
|
||||
draft: BaseViewDraft | null;
|
||||
effectiveFilter: FilterGroup | undefined;
|
||||
effectiveSorts: ViewSortConfig[] | undefined;
|
||||
isDirty: boolean;
|
||||
setFilter: (filter: FilterGroup | undefined) => void;
|
||||
setSorts: (sorts: ViewSortConfig[] | undefined) => void;
|
||||
reset: () => void;
|
||||
buildPromotedConfig: (baseline: ViewConfig) => ViewConfig;
|
||||
};
|
||||
|
||||
// JSON-stringify equality is good enough for FilterGroup (pure data tree)
|
||||
// and ViewSortConfig[] — V8 preserves non-numeric key insertion order so
|
||||
// the same object graph serializes identically. Avoids pulling in
|
||||
// lodash/fast-deep-equal for two known-shaped types. (Spec "Dirty check".)
|
||||
function filterEq(a: FilterGroup | undefined, b: FilterGroup | undefined) {
|
||||
return JSON.stringify(a ?? null) === JSON.stringify(b ?? null);
|
||||
}
|
||||
function sortsEq(
|
||||
a: ViewSortConfig[] | undefined,
|
||||
b: ViewSortConfig[] | undefined,
|
||||
) {
|
||||
return JSON.stringify(a ?? null) === JSON.stringify(b ?? null);
|
||||
}
|
||||
|
||||
export function useViewDraft(args: UseViewDraftArgs): ViewDraftState {
|
||||
const { userId, baseId, viewId, baselineFilter, baselineSorts } = args;
|
||||
const ready = !!(userId && baseId && viewId);
|
||||
|
||||
// Always mount an atom with a stable shape so hook order is consistent.
|
||||
// When not ready we still feed a key, but we won't read/write it.
|
||||
const atomKey = useMemo(
|
||||
() => ({
|
||||
userId: userId ?? "",
|
||||
baseId: baseId ?? "",
|
||||
viewId: viewId ?? "",
|
||||
}),
|
||||
[userId, baseId, viewId],
|
||||
);
|
||||
const [storedDraft, setDraft] = useAtom(viewDraftAtomFamily(atomKey));
|
||||
|
||||
const draft = ready ? storedDraft : null;
|
||||
|
||||
const setFilter = useCallback(
|
||||
(next: FilterGroup | undefined) => {
|
||||
if (!ready) return;
|
||||
const current = storedDraft ?? null;
|
||||
const mergedFilter = next;
|
||||
const mergedSorts = current?.sorts;
|
||||
if (mergedFilter === undefined && (mergedSorts === undefined || mergedSorts === null)) {
|
||||
setDraft(RESET);
|
||||
return;
|
||||
}
|
||||
setDraft({
|
||||
filter: mergedFilter,
|
||||
sorts: mergedSorts,
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
},
|
||||
[ready, storedDraft, setDraft],
|
||||
);
|
||||
|
||||
const setSorts = useCallback(
|
||||
(next: ViewSortConfig[] | undefined) => {
|
||||
if (!ready) return;
|
||||
const current = storedDraft ?? null;
|
||||
const mergedFilter = current?.filter;
|
||||
const mergedSorts = next;
|
||||
if (mergedFilter === undefined && (mergedSorts === undefined || mergedSorts === null)) {
|
||||
setDraft(RESET);
|
||||
return;
|
||||
}
|
||||
setDraft({
|
||||
filter: mergedFilter,
|
||||
sorts: mergedSorts,
|
||||
updatedAt: new Date().toISOString(),
|
||||
});
|
||||
},
|
||||
[ready, storedDraft, setDraft],
|
||||
);
|
||||
|
||||
const reset = useCallback(() => {
|
||||
if (!ready) return;
|
||||
setDraft(RESET);
|
||||
}, [ready, setDraft]);
|
||||
|
||||
const effectiveFilter = useMemo(
|
||||
() => (draft?.filter !== undefined ? draft.filter : baselineFilter),
|
||||
[draft?.filter, baselineFilter],
|
||||
);
|
||||
const effectiveSorts = useMemo(
|
||||
() => (draft?.sorts !== undefined ? draft.sorts : baselineSorts),
|
||||
[draft?.sorts, baselineSorts],
|
||||
);
|
||||
|
||||
const isDirty = useMemo(() => {
|
||||
if (!draft) return false;
|
||||
const filterDirty =
|
||||
draft.filter !== undefined && !filterEq(draft.filter, baselineFilter);
|
||||
const sortsDirty =
|
||||
draft.sorts !== undefined && !sortsEq(draft.sorts, baselineSorts);
|
||||
return filterDirty || sortsDirty;
|
||||
}, [draft, baselineFilter, baselineSorts]);
|
||||
|
||||
const buildPromotedConfig = useCallback(
|
||||
(baseline: ViewConfig): ViewConfig => ({
|
||||
...baseline,
|
||||
filter: draft?.filter ?? baseline.filter,
|
||||
sorts: draft?.sorts ?? baseline.sorts,
|
||||
}),
|
||||
[draft],
|
||||
);
|
||||
|
||||
if (!ready) {
|
||||
return {
|
||||
draft: null,
|
||||
effectiveFilter: baselineFilter,
|
||||
effectiveSorts: baselineSorts,
|
||||
isDirty: false,
|
||||
setFilter: () => {},
|
||||
setSorts: () => {},
|
||||
reset: () => {},
|
||||
buildPromotedConfig: (baseline) => baseline,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
draft,
|
||||
effectiveFilter,
|
||||
effectiveSorts,
|
||||
isDirty,
|
||||
setFilter,
|
||||
setSorts,
|
||||
reset,
|
||||
buildPromotedConfig,
|
||||
};
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
import { useQuery } from "@tanstack/react-query";
|
||||
import { useMemo } from "react";
|
||||
import api from "@/lib/api-client";
|
||||
|
||||
export type ResolvedPage = {
|
||||
id: string;
|
||||
slugId: string;
|
||||
title: string | null;
|
||||
icon: string | null;
|
||||
spaceId: string;
|
||||
space: { id: string; slug: string; name: string } | null;
|
||||
};
|
||||
|
||||
async function resolvePages(pageIds: string[]): Promise<ResolvedPage[]> {
|
||||
if (pageIds.length === 0) return [];
|
||||
const res = await api.post<{ items: ResolvedPage[] }>(
|
||||
"/bases/pages/resolve",
|
||||
{ pageIds },
|
||||
);
|
||||
return res.data.items;
|
||||
}
|
||||
|
||||
// Stable, sorted, deduped list so the query key is consistent across renders
|
||||
// no matter what order the caller hands us the ids in.
|
||||
function normalize(ids: (string | null | undefined)[]): string[] {
|
||||
const set = new Set<string>();
|
||||
for (const id of ids) {
|
||||
if (typeof id === "string" && id.length > 0) set.add(id);
|
||||
}
|
||||
return Array.from(set).sort();
|
||||
}
|
||||
|
||||
export type PageResolution = {
|
||||
// Map distinguishes three states via lookup:
|
||||
// - key absent → id not requested
|
||||
// - value undefined → still resolving (query pending, or stale fetch in flight)
|
||||
// - value null → resolved and not accessible (deleted, restricted, cross-workspace)
|
||||
// - value ResolvedPage → resolved and accessible
|
||||
pages: Map<string, ResolvedPage | null | undefined>;
|
||||
isLoading: boolean;
|
||||
};
|
||||
|
||||
export function useResolvedPages(
|
||||
pageIds: (string | null | undefined)[],
|
||||
): PageResolution {
|
||||
const normalized = useMemo(() => normalize(pageIds), [pageIds]);
|
||||
|
||||
const { data, isSuccess, isLoading } = useQuery({
|
||||
queryKey: ["bases", "pages", "resolve", normalized],
|
||||
queryFn: () => resolvePages(normalized),
|
||||
enabled: normalized.length > 0,
|
||||
staleTime: 30_000,
|
||||
gcTime: 5 * 60_000,
|
||||
});
|
||||
|
||||
const pages = useMemo(() => {
|
||||
const map = new Map<string, ResolvedPage | null | undefined>();
|
||||
// Seed with undefined (= "still resolving") until the fetch succeeds.
|
||||
for (const id of normalized) map.set(id, isSuccess ? null : undefined);
|
||||
for (const item of data ?? []) map.set(item.id, item);
|
||||
return map;
|
||||
}, [normalized, data, isSuccess]);
|
||||
|
||||
return { pages, isLoading };
|
||||
}
|
||||
@@ -295,48 +295,3 @@
  white-space: nowrap;
  flex-shrink: 0;
}

.pagePill {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  padding: 1px 6px;
  border-radius: var(--mantine-radius-sm);
  font-size: var(--mantine-font-size-xs);
  color: light-dark(var(--mantine-color-blue-7), var(--mantine-color-blue-4));
  background-color: light-dark(var(--mantine-color-gray-1), var(--mantine-color-dark-5));
  text-decoration: none;
  max-width: 100%;
  overflow: hidden;
}

.pagePill:hover {
  background-color: light-dark(var(--mantine-color-gray-2), var(--mantine-color-dark-4));
}

.pagePillIcon {
  display: inline-flex;
  font-size: 14px;
  line-height: 1;
  flex-shrink: 0;
}

.pagePillIconFallback {
  color: light-dark(var(--mantine-color-gray-6), var(--mantine-color-dark-2));
  flex-shrink: 0;
}

.pagePillText {
  overflow: hidden;
  text-overflow: ellipsis;
  white-space: nowrap;
}

.pageMissing {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  font-size: var(--mantine-font-size-xs);
  color: light-dark(var(--mantine-color-gray-5), var(--mantine-color-dark-3));
  font-style: italic;
}
@@ -7,7 +7,6 @@ export type BasePropertyType =
|
||||
| 'date'
|
||||
| 'person'
|
||||
| 'file'
|
||||
| 'page'
|
||||
| 'checkbox'
|
||||
| 'url'
|
||||
| 'email'
|
||||
@@ -64,8 +63,6 @@ export type PersonTypeOptions = {
|
||||
allowMultiple?: boolean;
|
||||
};
|
||||
|
||||
export type PageTypeOptions = Record<string, never>;
|
||||
|
||||
export type TypeOptions =
|
||||
| SelectTypeOptions
|
||||
| NumberTypeOptions
|
||||
@@ -75,7 +72,6 @@ export type TypeOptions =
|
||||
| UrlTypeOptions
|
||||
| EmailTypeOptions
|
||||
| PersonTypeOptions
|
||||
| PageTypeOptions
|
||||
| Record<string, unknown>;
|
||||
|
||||
export type IBaseProperty = {
|
||||
@@ -299,14 +295,3 @@ export type UpdatePropertyResult = {
|
||||
// when the job finished migrating cells.
|
||||
jobId: string | null;
|
||||
};
|
||||
|
||||
// Local-first draft of filter / sort tweaks for a single view, stored in
|
||||
// localStorage scoped to (userId, baseId, viewId). An absent `filter` or
|
||||
// `sorts` field means "inherit the baseline for that axis". See
|
||||
// `.claude/superpowers/specs/2026-04-20-base-view-draft-design.md`.
|
||||
export type BaseViewDraft = {
|
||||
filter?: FilterGroup;
|
||||
sorts?: ViewSortConfig[];
|
||||
// ISO timestamp written on each put; diagnostic only, not read by logic.
|
||||
updatedAt: string;
|
||||
};
|
||||
|
||||
@@ -9,11 +9,9 @@ export enum SpaceCaslSubject {
|
||||
Settings = "settings",
|
||||
Member = "member",
|
||||
Page = "page",
|
||||
Base = "base",
|
||||
}
|
||||
|
||||
export type SpaceAbility =
|
||||
| [SpaceCaslAction, SpaceCaslSubject.Settings]
|
||||
| [SpaceCaslAction, SpaceCaslSubject.Member]
|
||||
| [SpaceCaslAction, SpaceCaslSubject.Page]
|
||||
| [SpaceCaslAction, SpaceCaslSubject.Base];
|
||||
| [SpaceCaslAction, SpaceCaslSubject.Page];
|
||||
|
||||
@@ -37,6 +37,7 @@
|
||||
"@aws-sdk/lib-storage": "3.1014.0",
|
||||
"@aws-sdk/s3-request-presigner": "3.1014.0",
|
||||
"@clickhouse/client": "^1.18.2",
|
||||
"@duckdb/node-api": "1.5.2-r.1",
|
||||
"@fastify/cookie": "^11.0.2",
|
||||
"@fastify/multipart": "^9.4.0",
|
||||
"@fastify/static": "^9.0.0",
|
||||
|
||||
@@ -9,15 +9,18 @@ import { BasePropertyService } from './services/base-property.service';
|
||||
import { BaseRowService } from './services/base-row.service';
|
||||
import { BaseViewService } from './services/base-view.service';
|
||||
import { BaseCsvExportService } from './services/base-csv-export.service';
|
||||
import { BasePageResolverService } from './services/base-page-resolver.service';
|
||||
import { BaseQueueProcessor } from './processors/base-queue.processor';
|
||||
import { BaseWsService } from './realtime/base-ws.service';
|
||||
import { BaseWsConsumers } from './realtime/base-ws-consumers';
|
||||
import { BasePresenceService } from './realtime/base-presence.service';
|
||||
import { QueueName } from '../../integrations/queue/constants';
|
||||
import { QueryCacheModule } from './query-cache/query-cache.module';
|
||||
|
||||
@Module({
|
||||
imports: [BullModule.registerQueue({ name: QueueName.BASE_QUEUE })],
|
||||
imports: [
|
||||
BullModule.registerQueue({ name: QueueName.BASE_QUEUE }),
|
||||
QueryCacheModule,
|
||||
],
|
||||
controllers: [
|
||||
BaseController,
|
||||
BasePropertyController,
|
||||
@@ -30,7 +33,6 @@ import { QueueName } from '../../integrations/queue/constants';
|
||||
BaseRowService,
|
||||
BaseViewService,
|
||||
BaseCsvExportService,
|
||||
BasePageResolverService,
|
||||
BaseQueueProcessor,
|
||||
BasePresenceService,
|
||||
BaseWsService,
|
||||
|
||||
@@ -9,7 +9,6 @@ export const BasePropertyType = {
|
||||
DATE: 'date',
|
||||
PERSON: 'person',
|
||||
FILE: 'file',
|
||||
PAGE: 'page',
|
||||
CHECKBOX: 'checkbox',
|
||||
URL: 'url',
|
||||
EMAIL: 'email',
|
||||
@@ -115,7 +114,6 @@ const typeOptionsSchemaMap: Record<BasePropertyTypeValue, z.ZodType> = {
|
||||
[BasePropertyType.DATE]: dateTypeOptionsSchema,
|
||||
[BasePropertyType.PERSON]: personTypeOptionsSchema,
|
||||
[BasePropertyType.FILE]: emptyTypeOptionsSchema,
|
||||
[BasePropertyType.PAGE]: emptyTypeOptionsSchema,
|
||||
[BasePropertyType.CHECKBOX]: checkboxTypeOptionsSchema,
|
||||
[BasePropertyType.URL]: urlTypeOptionsSchema,
|
||||
[BasePropertyType.EMAIL]: emailTypeOptionsSchema,
|
||||
@@ -161,7 +159,6 @@ const cellValueSchemaMap: Partial<Record<BasePropertyTypeValue, z.ZodType>> = {
|
||||
fileSize: z.number().optional(),
|
||||
filePath: z.string().optional(),
|
||||
})),
|
||||
[BasePropertyType.PAGE]: z.uuid(),
|
||||
[BasePropertyType.CHECKBOX]: z.boolean(),
|
||||
[BasePropertyType.URL]: z.url(),
|
||||
[BasePropertyType.EMAIL]: z.email(),
|
||||
@@ -195,7 +192,6 @@ export type CellConversionContext = {
|
||||
fromTypeOptions?: unknown;
|
||||
userNames?: Map<string, string>;
|
||||
attachmentNames?: Map<string, string>;
|
||||
pageTitles?: Map<string, string>;
|
||||
};
|
||||
|
||||
function resolveChoiceName(
|
||||
@@ -260,16 +256,6 @@ export function attemptCellConversion(
|
||||
.filter((v): v is string => typeof v === 'string' && v.length > 0);
|
||||
return { converted: true, value: parts.join(', ') };
|
||||
}
|
||||
if (fromType === BasePropertyType.PAGE && typeof value === 'string') {
|
||||
const title = ctx.pageTitles?.get(value);
|
||||
return { converted: true, value: title ?? '' };
|
||||
}
|
||||
}
|
||||
|
||||
// Page cells only accept a page UUID. Free text / other IDs can't be
|
||||
// coerced into a valid page reference — drop to null.
|
||||
if (toType === BasePropertyType.PAGE && fromType !== BasePropertyType.PAGE) {
|
||||
return { converted: true, value: null };
|
||||
}
|
||||
|
||||
const targetSchema = cellValueSchemaMap[toType];
|
||||
|
||||
@@ -12,13 +12,11 @@ import {
|
||||
import { FastifyReply } from 'fastify';
|
||||
import { BaseService } from '../services/base.service';
|
||||
import { BaseCsvExportService } from '../services/base-csv-export.service';
|
||||
import { BasePageResolverService } from '../services/base-page-resolver.service';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { CreateBaseDto } from '../dto/create-base.dto';
|
||||
import { UpdateBaseDto } from '../dto/update-base.dto';
|
||||
import { BaseIdDto } from '../dto/base.dto';
|
||||
import { ExportBaseCsvDto } from '../dto/export-base.dto';
|
||||
import { ResolvePagesDto } from '../dto/resolve-pages.dto';
|
||||
import { AuthUser } from '../../../common/decorators/auth-user.decorator';
|
||||
import { AuthWorkspace } from '../../../common/decorators/auth-workspace.decorator';
|
||||
import { JwtAuthGuard } from '../../../common/guards/jwt-auth.guard';
|
||||
@@ -37,7 +35,6 @@ export class BaseController {
|
||||
constructor(
|
||||
private readonly baseService: BaseService,
|
||||
private readonly baseCsvExportService: BaseCsvExportService,
|
||||
private readonly basePageResolverService: BasePageResolverService,
|
||||
private readonly baseRepo: BaseRepo,
|
||||
private readonly spaceAbility: SpaceAbilityFactory,
|
||||
) {}
|
||||
@@ -141,19 +138,4 @@ export class BaseController {
|
||||
res,
|
||||
);
|
||||
}
|
||||
|
||||
@HttpCode(HttpStatus.OK)
|
||||
@Post('pages/resolve')
|
||||
async resolvePages(
|
||||
@Body() dto: ResolvePagesDto,
|
||||
@AuthUser() user: User,
|
||||
@AuthWorkspace() workspace: Workspace,
|
||||
) {
|
||||
const items = await this.basePageResolverService.resolvePages(
|
||||
dto.pageIds,
|
||||
workspace.id,
|
||||
user.id,
|
||||
);
|
||||
return { items };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +0,0 @@
import { ArrayMaxSize, ArrayMinSize, IsArray, IsUUID } from 'class-validator';

export class ResolvePagesDto {
  @IsArray()
  @ArrayMinSize(1)
  @ArrayMaxSize(100)
  @IsUUID('all', { each: true })
  pageIds: string[];
}
@@ -9,7 +9,6 @@ export const PropertyKind = {
|
||||
MULTI: 'multi',
|
||||
PERSON: 'person',
|
||||
FILE: 'file',
|
||||
PAGE: 'page',
|
||||
SYS_USER: 'sys_user',
|
||||
} as const;
|
||||
|
||||
@@ -38,8 +37,6 @@ export function propertyKind(type: string): PropertyKindValue | null {
|
||||
return PropertyKind.PERSON;
|
||||
case BasePropertyType.FILE:
|
||||
return PropertyKind.FILE;
|
||||
case BasePropertyType.PAGE:
|
||||
return PropertyKind.PAGE;
|
||||
case BasePropertyType.LAST_EDITED_BY:
|
||||
return PropertyKind.SYS_USER;
|
||||
default:
|
||||
|
||||
@@ -66,8 +66,6 @@ function buildCondition(
|
||||
return personCondition(eb, cond, prop);
|
||||
case PropertyKind.FILE:
|
||||
return arrayOfIdsCondition(eb, cond);
|
||||
case PropertyKind.PAGE:
|
||||
return pageCondition(eb, cond);
|
||||
default:
|
||||
return FALSE;
|
||||
}
|
||||
@@ -294,48 +292,6 @@ function personCondition(
|
||||
}
|
||||
}
|
||||
|
||||
function pageCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Page cells store a single page uuid as text. Shape matches selectCondition.
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return eb(expr as any, 'in', arr);
    }
    case 'none': {
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, 'not in', arr),
      ]);
    }
    default:
      return FALSE;
  }
}
|
||||
function arrayOfIdsCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
|
||||
const expr = arrayCell(cond.propertyId);
|
||||
const val = cond.value;
|
||||
|
||||
@@ -87,16 +87,4 @@ describe('serializeCellForCsv', () => {
|
||||
expect(serializeCellForCsv(prop, 'u2', { userNames })).toBe('Bob');
|
||||
expect(serializeCellForCsv(prop, 'missing', { userNames })).toBe('');
|
||||
});
|
||||
|
||||
it('page resolves via pageTitles', () => {
|
||||
const pageTitles = new Map([
|
||||
['p1', 'Launch plan'],
|
||||
['p2', 'Retro notes'],
|
||||
]);
|
||||
const prop = p(BasePropertyType.PAGE);
|
||||
expect(serializeCellForCsv(prop, 'p1', { pageTitles })).toBe('Launch plan');
|
||||
expect(serializeCellForCsv(prop, 'missing', { pageTitles })).toBe('');
|
||||
expect(serializeCellForCsv(prop, 'p1', {})).toBe('');
|
||||
expect(serializeCellForCsv(prop, 123, { pageTitles })).toBe('');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2,7 +2,6 @@ import { BasePropertyType, BasePropertyTypeValue } from '../base.schemas';
|
||||
|
||||
export type CellCsvContext = {
|
||||
userNames?: Map<string, string>;
|
||||
pageTitles?: Map<string, string>;
|
||||
};
|
||||
|
||||
type PropertyLike = {
|
||||
@@ -82,10 +81,6 @@ export function serializeCellForCsv(
|
||||
case BasePropertyType.LAST_EDITED_BY:
|
||||
return resolveUser(value, ctx);
|
||||
|
||||
case BasePropertyType.PAGE:
|
||||
if (typeof value !== 'string') return '';
|
||||
return ctx.pageTitles?.get(value) ?? '';
|
||||
|
||||
default:
|
||||
return typeof value === 'object' ? JSON.stringify(value) : String(value);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,667 @@
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
OnApplicationBootstrap,
|
||||
OnModuleDestroy,
|
||||
Optional,
|
||||
} from '@nestjs/common';
|
||||
import { RedisService } from '@nestjs-labs/nestjs-ioredis';
|
||||
import type { Redis } from 'ioredis';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { BaseRow } from '@docmost/db/types/entity.types';
|
||||
import {
|
||||
CursorPaginationResult,
|
||||
emptyCursorPaginationResult,
|
||||
} from '@docmost/db/pagination/cursor-pagination';
|
||||
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
|
||||
import {
|
||||
CURSOR_TAIL_KEYS,
|
||||
FilterNode,
|
||||
PropertySchema,
|
||||
SearchSpec,
|
||||
SortBuild,
|
||||
SortSpec,
|
||||
buildSorts,
|
||||
makeCursor,
|
||||
} from '../engine';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { CollectionLoader } from './collection-loader';
|
||||
import { buildDuckDbListQuery } from './duckdb-query-builder';
|
||||
import { DuckDbRuntime } from './duckdb-runtime';
|
||||
import { BasePropertyType } from '../base.schemas';
|
||||
import {
|
||||
ChangeEnvelope,
|
||||
ColumnSpec,
|
||||
LoadedCollection,
|
||||
} from './query-cache.types';
|
||||
import { EnvironmentService } from '../../../integrations/environment/environment.service';
|
||||
|
||||
export type CacheListOpts = {
|
||||
filter?: FilterNode;
|
||||
sorts?: SortSpec[];
|
||||
search?: SearchSpec;
|
||||
schema: PropertySchema;
|
||||
pagination: PaginationOptions;
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class BaseQueryCacheService
|
||||
implements OnApplicationBootstrap, OnModuleDestroy
|
||||
{
|
||||
private readonly logger = new Logger(BaseQueryCacheService.name);
|
||||
private readonly collections = new Map<string, LoadedCollection>();
|
||||
private readonly inFlightLoads = new Map<string, Promise<LoadedCollection>>();
|
||||
|
||||
/*
|
||||
* Serializes every write-path call into the shared writer connection.
|
||||
* DuckDB connections aren't thread-safe for concurrent prepared statements,
|
||||
* and Redis pub/sub can fire `applyChange` calls concurrently since the
|
||||
* subscriber's `pmessage` handler doesn't await. We funnel all writes
|
||||
* (`upsertRow`, `deleteRow`, `updatePosition`, `refreshRowCount`,
|
||||
* `invalidate`, `evictLru`) through this simple Promise chain so only
|
||||
* one is in flight at a time. Reads are unaffected — they flow through
|
||||
* the reader pool, which handles its own concurrency.
|
||||
*/
|
||||
private writeQueue: Promise<void> = Promise.resolve();
|
||||
|
||||
private async serializeWrite<T>(fn: () => Promise<T>): Promise<T> {
|
||||
const prev = this.writeQueue;
|
||||
let unblock!: () => void;
|
||||
this.writeQueue = new Promise<void>((resolve) => { unblock = resolve; });
|
||||
try {
|
||||
await prev;
|
||||
return await fn();
|
||||
} finally {
|
||||
unblock();
|
||||
}
|
||||
}
|
||||
|
||||
constructor(
|
||||
private readonly configProvider: QueryCacheConfigProvider,
|
||||
private readonly baseRepo: BaseRepo,
|
||||
private readonly collectionLoader: CollectionLoader,
|
||||
private readonly runtime: DuckDbRuntime,
|
||||
@Optional() private readonly redisService: RedisService | null = null,
|
||||
@Optional() private readonly env: EnvironmentService | null = null,
|
||||
) {}
|
||||
|
||||
async onApplicationBootstrap(): Promise<void> {
|
||||
const { enabled, warmTopN } = this.configProvider.config;
|
||||
if (!enabled) return;
|
||||
if (!this.runtime.isReady()) {
|
||||
this.logger.warn('runtime not ready; skipping warm-up');
|
||||
return;
|
||||
}
|
||||
|
||||
const redis = this.tryGetRedisClient();
|
||||
if (!redis) return;
|
||||
|
||||
try {
|
||||
const ids = await redis.zrevrange(
|
||||
'base-query-cache:recent',
|
||||
0,
|
||||
warmTopN - 1,
|
||||
);
|
||||
for (const baseId of ids) {
|
||||
try {
|
||||
const base = await this.baseRepo.findById(baseId);
|
||||
if (!base) continue;
|
||||
await this.ensureLoaded(baseId, base.workspaceId);
|
||||
} catch (err) {
|
||||
this.logger.debug(
|
||||
`warm-up skipped ${baseId}: ${(err as Error).message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
this.logger.log(`Warmed ${ids.length} collections on boot`);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(`Warm-up failed: ${error.message}`);
|
||||
if (error.stack) this.logger.warn(error.stack);
|
||||
}
|
||||
}
|
||||
|
||||
async onModuleDestroy(): Promise<void> {
|
||||
// The runtime owns the instance/connection lifecycle; we just clear
|
||||
// our metadata. DETACH is a no-op during shutdown because the instance
|
||||
// is closing anyway.
|
||||
this.collections.clear();
|
||||
}
|
||||
|
||||
async list(
|
||||
baseId: string,
|
||||
workspaceId: string,
|
||||
opts: CacheListOpts,
|
||||
): Promise<CursorPaginationResult<BaseRow>> {
|
||||
const debug = this.env?.getBaseQueryCacheDebug() ?? false;
|
||||
const trace = this.env?.getBaseQueryCacheTrace?.() ?? false;
|
||||
const tStart = debug ? Date.now() : 0;
|
||||
|
||||
const tEnsure = debug ? Date.now() : 0;
|
||||
const collection = await this.ensureLoaded(baseId, workspaceId);
|
||||
const ensureMs = debug ? Date.now() - tEnsure : 0;
|
||||
|
||||
const sortBuilds: SortBuild[] =
|
||||
opts.sorts && opts.sorts.length > 0
|
||||
? buildSorts(opts.sorts, opts.schema)
|
||||
: [];
|
||||
const cursor = makeCursor(sortBuilds, CURSOR_TAIL_KEYS);
|
||||
const sortFieldKeys = sortBuilds.map((s) => s.key);
|
||||
const allFieldKeys = [...sortFieldKeys, 'position', 'id'];
|
||||
|
||||
let afterKeys: Record<string, unknown> | undefined;
|
||||
if (opts.pagination.cursor) {
|
||||
const decoded = cursor.decodeCursor(opts.pagination.cursor, allFieldKeys);
|
||||
afterKeys = cursor.parseCursor(decoded);
|
||||
}
|
||||
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
columns: collection.columns,
|
||||
filter: opts.filter,
|
||||
sorts: opts.sorts,
|
||||
search: opts.search,
|
||||
pagination: {
|
||||
limit: opts.pagination.limit,
|
||||
afterKeys: afterKeys as any,
|
||||
},
|
||||
schema: collection.schema,
|
||||
});
|
||||
|
||||
if (trace) {
|
||||
console.log(
|
||||
'[cache-trace]',
|
||||
JSON.stringify({
|
||||
phase: 'query.sql',
|
||||
baseId: baseId.slice(0, 8),
|
||||
schema: collection.schema,
|
||||
sql,
|
||||
params,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const tExec = debug ? Date.now() : 0;
|
||||
const duckRows = await this.runtime.withReader(async (conn) => {
|
||||
const prepared = await conn.prepare(sql);
|
||||
for (let i = 0; i < params.length; i++) {
|
||||
const p = params[i];
|
||||
const oneBased = i + 1;
|
||||
if (p === null || p === undefined) {
|
||||
prepared.bindNull(oneBased);
|
||||
} else if (typeof p === 'string') {
|
||||
prepared.bindVarchar(oneBased, p);
|
||||
} else if (typeof p === 'number') {
|
||||
prepared.bindDouble(oneBased, p);
|
||||
} else if (typeof p === 'boolean') {
|
||||
prepared.bindBoolean(oneBased, p);
|
||||
} else if (p instanceof Date) {
|
||||
prepared.bindVarchar(oneBased, p.toISOString());
|
||||
} else {
|
||||
prepared.bindVarchar(oneBased, JSON.stringify(p));
|
||||
}
|
||||
}
|
||||
const reader = await prepared.runAndReadAll();
|
||||
return reader.getRowObjectsJS();
|
||||
});
|
||||
const execMs = debug ? Date.now() - tExec : 0;
|
||||
|
||||
const hasNextPage = duckRows.length > opts.pagination.limit;
|
||||
if (hasNextPage) duckRows.pop();
|
||||
|
||||
if (duckRows.length === 0) {
|
||||
if (debug) {
|
||||
console.log(
|
||||
'[cache-perf]',
|
||||
JSON.stringify({
|
||||
phase: 'cache.list',
|
||||
baseId: baseId.slice(0, 8),
|
||||
totalMs: Date.now() - tStart,
|
||||
ensureMs,
|
||||
execMs,
|
||||
shapeMs: 0,
|
||||
rows: 0,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return emptyCursorPaginationResult<BaseRow>(opts.pagination.limit);
|
||||
}
|
||||
|
||||
const tShape = debug ? Date.now() : 0;
|
||||
const items = duckRows.map((r) =>
|
||||
shapeBaseRow(r, collection.columns),
|
||||
);
|
||||
const shapeMs = debug ? Date.now() - tShape : 0;
|
||||
|
||||
const endRow = duckRows[duckRows.length - 1];
|
||||
const startRow = duckRows[0];
|
||||
const encodeFromRow = (raw: Record<string, unknown>): string => {
|
||||
const entries: Array<[string, unknown]> = [];
|
||||
for (const sb of sortBuilds) entries.push([sb.key, raw[sb.key]]);
|
||||
entries.push(['position', raw.position]);
|
||||
entries.push(['id', raw.id]);
|
||||
return cursor.encodeCursor(entries);
|
||||
};
|
||||
|
||||
const hasPrevPage = !!opts.pagination.cursor;
|
||||
const nextCursor = hasNextPage ? encodeFromRow(endRow) : null;
|
||||
const prevCursor = hasPrevPage ? encodeFromRow(startRow) : null;
|
||||
|
||||
if (debug) {
|
||||
console.log(
|
||||
'[cache-perf]',
|
||||
JSON.stringify({
|
||||
phase: 'cache.list',
|
||||
baseId: baseId.slice(0, 8),
|
||||
totalMs: Date.now() - tStart,
|
||||
ensureMs,
|
||||
execMs,
|
||||
shapeMs,
|
||||
rows: items.length,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
items,
|
||||
meta: {
|
||||
limit: opts.pagination.limit,
|
||||
hasNextPage,
|
||||
hasPrevPage,
|
||||
nextCursor,
|
||||
prevCursor,
|
||||
},
|
||||
};
|
||||
}
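  // Illustrative note (not part of the original source): the cursor above is a
  // keyset, not an offset. A minimal sketch, assuming a single ascending sort
  // key s0 plus the position/id tie-breakers, of what round-trips:
  //
  //   encodeFromRow(lastRow)            -> opaque encoding of { s0: 42, position: 'A3', id: '...' }
  //   cursor.decodeCursor(c, allKeys)   -> the same record, validated against allFieldKeys
  //   keyset predicate (see builder)    -> roughly (s0 > ?) OR (s0 = ? AND position > ?) OR ...
  //
  // so page N+1 resumes strictly after the last row of page N even when rows
  // are inserted or deleted between requests.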
|
||||
|
||||
async invalidate(baseId: string): Promise<void> {
|
||||
const collection = this.collections.get(baseId);
|
||||
if (!collection) return;
|
||||
await this.serializeWrite(async () => {
|
||||
await this.runtime.detachBase(collection.schema);
|
||||
});
|
||||
this.collections.delete(baseId);
|
||||
}
|
||||
|
||||
isResident(baseId: string): boolean {
|
||||
return this.collections.has(baseId);
|
||||
}
|
||||
|
||||
residentSize(): number {
|
||||
return this.collections.size;
|
||||
}
|
||||
|
||||
peek(baseId: string): LoadedCollection | undefined {
|
||||
return this.collections.get(baseId);
|
||||
}
|
||||
|
||||
residencySnapshot(): Array<{
|
||||
baseId: string;
|
||||
schema: string;
|
||||
rows: number;
|
||||
approxMb: number;
|
||||
}> {
|
||||
const out: Array<{
|
||||
baseId: string;
|
||||
schema: string;
|
||||
rows: number;
|
||||
approxMb: number;
|
||||
}> = [];
|
||||
for (const [baseId, c] of this.collections) {
|
||||
out.push({
|
||||
baseId,
|
||||
schema: c.schema,
|
||||
rows: c.rowCount,
|
||||
approxMb: +(c.approxBytes / (1024 * 1024)).toFixed(1),
|
||||
});
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
async applyChange(env: ChangeEnvelope): Promise<void> {
|
||||
const trace = this.env?.getBaseQueryCacheTrace?.() ?? false;
|
||||
const collection = this.collections.get(env.baseId);
|
||||
|
||||
if (trace) {
|
||||
console.log(
|
||||
'[cache-trace]',
|
||||
JSON.stringify({
|
||||
phase: 'pubsub.apply',
|
||||
baseId: env.baseId.slice(0, 8),
|
||||
kind: env.kind,
|
||||
resident: !!collection,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
if (!collection) return;
|
||||
|
||||
try {
|
||||
switch (env.kind) {
|
||||
case 'schema-invalidate':
|
||||
if (env.schemaVersion > collection.schemaVersion) {
|
||||
await this.invalidate(env.baseId);
|
||||
}
|
||||
return;
|
||||
case 'row-upsert':
|
||||
await this.upsertRow(collection, env.row);
|
||||
await this.refreshRowCount(collection);
|
||||
return;
|
||||
case 'row-delete':
|
||||
await this.deleteRow(collection, env.rowId);
|
||||
await this.refreshRowCount(collection);
|
||||
return;
|
||||
case 'rows-delete':
|
||||
for (const id of env.rowIds) await this.deleteRow(collection, id);
|
||||
await this.refreshRowCount(collection);
|
||||
return;
|
||||
case 'row-reorder':
|
||||
await this.updatePosition(collection, env.rowId, env.position);
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`applyChange failed for ${env.baseId}; invalidating: ${error.message}`,
|
||||
);
|
||||
if (error.stack) this.logger.warn(error.stack);
|
||||
await this.invalidate(env.baseId);
|
||||
}
|
||||
}
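  // Illustrative sketch (an assumption, not a copy of query-cache.types.ts):
  // the ChangeEnvelope union handled by the switch above roughly covers these
  // shapes, discriminated on `kind`:
  //
  //   type ChangeEnvelopeSketch =
  //     | { kind: 'schema-invalidate'; baseId: string; schemaVersion: number }
  //     | { kind: 'row-upsert'; baseId: string; row: Record<string, unknown> }
  //     | { kind: 'row-delete'; baseId: string; rowId: string }
  //     | { kind: 'rows-delete'; baseId: string; rowIds: string[] }
  //     | { kind: 'row-reorder'; baseId: string; rowId: string; position: string };
  //
  // A failure inside any handler invalidates the whole collection rather than
  // risk serving a drifted copy.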
|
||||
|
||||
private async ensureLoaded(
|
||||
baseId: string,
|
||||
workspaceId: string,
|
||||
): Promise<LoadedCollection> {
|
||||
const debug = this.env?.getBaseQueryCacheDebug() ?? false;
|
||||
const existing = this.collections.get(baseId);
|
||||
|
||||
const tFind = debug ? Date.now() : 0;
|
||||
const base = await this.baseRepo.findById(baseId);
|
||||
const findMs = debug ? Date.now() - tFind : 0;
|
||||
if (!base) throw new Error(`Base ${baseId} not found`);
|
||||
const freshVersion = (base as any).schemaVersion ?? 1;
|
||||
|
||||
if (existing && existing.schemaVersion === freshVersion) {
|
||||
existing.lastAccessedAt = Date.now();
|
||||
this.recordAccess(baseId);
|
||||
if (debug) {
|
||||
console.log(
|
||||
'[cache-perf]',
|
||||
JSON.stringify({
|
||||
phase: 'ensureLoaded.hit',
|
||||
baseId: baseId.slice(0, 8),
|
||||
findMs,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return existing;
|
||||
}
|
||||
|
||||
if (existing) {
|
||||
await this.serializeWrite(async () => {
|
||||
await this.runtime.detachBase(existing.schema);
|
||||
});
|
||||
this.collections.delete(baseId);
|
||||
}
|
||||
|
||||
const inFlight = this.inFlightLoads.get(baseId);
|
||||
if (inFlight) {
|
||||
const loaded = await inFlight;
|
||||
this.recordAccess(baseId);
|
||||
return loaded;
|
||||
}
|
||||
|
||||
const tLoad = debug ? Date.now() : 0;
|
||||
const promise = (async () => {
|
||||
try {
|
||||
const { maxCollections } = this.configProvider.config;
|
||||
if (this.collections.size >= maxCollections) {
|
||||
await this.evictLru();
|
||||
}
|
||||
const loaded = await this.collectionLoader.load(baseId, workspaceId);
|
||||
this.collections.set(baseId, loaded);
|
||||
return loaded;
|
||||
} finally {
|
||||
this.inFlightLoads.delete(baseId);
|
||||
}
|
||||
})();
|
||||
this.inFlightLoads.set(baseId, promise);
|
||||
const loaded = await promise;
|
||||
const loadMs = debug ? Date.now() - tLoad : 0;
|
||||
this.recordAccess(baseId);
|
||||
if (debug) {
|
||||
console.log(
|
||||
'[cache-perf]',
|
||||
JSON.stringify({
|
||||
phase: 'ensureLoaded.miss',
|
||||
baseId: baseId.slice(0, 8),
|
||||
findMs,
|
||||
loadMs,
|
||||
rows: loaded.rowCount,
|
||||
approxMb: +(loaded.approxBytes / (1024 * 1024)).toFixed(1),
|
||||
}),
|
||||
);
|
||||
}
|
||||
return loaded;
|
||||
}
|
||||
|
||||
private async evictLru(): Promise<void> {
|
||||
let oldestKey: string | null = null;
|
||||
let oldestTime = Number.POSITIVE_INFINITY;
|
||||
for (const [key, col] of this.collections) {
|
||||
if (col.lastAccessedAt < oldestTime) {
|
||||
oldestTime = col.lastAccessedAt;
|
||||
oldestKey = key;
|
||||
}
|
||||
}
|
||||
if (oldestKey) {
|
||||
const col = this.collections.get(oldestKey)!;
|
||||
await this.serializeWrite(async () => {
|
||||
await this.runtime.detachBase(col.schema);
|
||||
});
|
||||
this.collections.delete(oldestKey);
|
||||
this.logger.debug(`Evicted LRU collection ${oldestKey}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async upsertRow(
|
||||
collection: LoadedCollection,
|
||||
row: Record<string, unknown>,
|
||||
): Promise<void> {
|
||||
return this.serializeWrite(async () => {
|
||||
const specs = collection.columns;
|
||||
const columnList = specs.map((s) => quoteIdent(s.column)).join(', ');
|
||||
const placeholders = specs.map(() => '?').join(', ');
|
||||
const sql = `INSERT OR REPLACE INTO ${collection.schema}.rows (${columnList}) VALUES (${placeholders})`;
|
||||
|
||||
const writer = this.runtime.getWriter();
|
||||
const prepared = await writer.prepare(sql);
|
||||
for (let i = 0; i < specs.length; i++) {
|
||||
const spec = specs[i];
|
||||
const oneBased = i + 1;
|
||||
const raw = readFromRowEvent(row, spec);
|
||||
if (raw == null) {
|
||||
prepared.bindNull(oneBased);
|
||||
continue;
|
||||
}
|
||||
switch (spec.ddlType) {
|
||||
case 'VARCHAR':
|
||||
prepared.bindVarchar(oneBased, String(raw));
|
||||
break;
|
||||
case 'DOUBLE': {
|
||||
const n = Number(raw);
|
||||
if (Number.isNaN(n)) prepared.bindNull(oneBased);
|
||||
else prepared.bindDouble(oneBased, n);
|
||||
break;
|
||||
}
|
||||
case 'BOOLEAN':
|
||||
prepared.bindBoolean(oneBased, Boolean(raw));
|
||||
break;
|
||||
case 'TIMESTAMPTZ': {
|
||||
const d = raw instanceof Date ? raw : new Date(String(raw));
|
||||
if (Number.isNaN(d.getTime())) prepared.bindNull(oneBased);
|
||||
else prepared.bindVarchar(oneBased, d.toISOString());
|
||||
break;
|
||||
}
|
||||
case 'JSON':
|
||||
prepared.bindVarchar(oneBased, JSON.stringify(raw));
|
||||
break;
|
||||
}
|
||||
}
|
||||
await prepared.run();
|
||||
});
|
||||
}
|
||||
|
||||
private async deleteRow(
|
||||
collection: LoadedCollection,
|
||||
rowId: string,
|
||||
): Promise<void> {
|
||||
return this.serializeWrite(async () => {
|
||||
const writer = this.runtime.getWriter();
|
||||
const prepared = await writer.prepare(
|
||||
`DELETE FROM ${collection.schema}.rows WHERE id = ?`,
|
||||
);
|
||||
prepared.bindVarchar(1, rowId);
|
||||
await prepared.run();
|
||||
});
|
||||
}
|
||||
|
||||
private async updatePosition(
|
||||
collection: LoadedCollection,
|
||||
rowId: string,
|
||||
position: string,
|
||||
): Promise<void> {
|
||||
return this.serializeWrite(async () => {
|
||||
const writer = this.runtime.getWriter();
|
||||
const prepared = await writer.prepare(
|
||||
`UPDATE ${collection.schema}.rows SET position = ? WHERE id = ?`,
|
||||
);
|
||||
prepared.bindVarchar(1, position);
|
||||
prepared.bindVarchar(2, rowId);
|
||||
await prepared.run();
|
||||
});
|
||||
}
|
||||
|
||||
private async refreshRowCount(collection: LoadedCollection): Promise<void> {
|
||||
return this.serializeWrite(async () => {
|
||||
try {
|
||||
const res = await this.runtime.getWriter().runAndReadAll(
|
||||
`SELECT count(*) AS c FROM ${collection.schema}.rows`,
|
||||
);
|
||||
const row = res.getRowObjects()[0] as { c: bigint | number };
|
||||
collection.rowCount = Number(row.c);
|
||||
collection.approxBytes = collection.rowCount * collection.columns.length * 64;
|
||||
} catch {
|
||||
// stale rowCount self-corrects on next reload
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private recordAccess(baseId: string): void {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
const redis = this.tryGetRedisClient();
|
||||
if (!redis) return;
|
||||
const nowMs = Date.now();
|
||||
const maxKeep = this.configProvider.config.maxCollections * 10;
|
||||
void (async () => {
|
||||
try {
|
||||
await redis.zadd('base-query-cache:recent', nowMs, baseId);
|
||||
await redis.zremrangebyrank(
|
||||
'base-query-cache:recent',
|
||||
0,
|
||||
-(maxKeep + 1),
|
||||
);
|
||||
} catch (err) {
|
||||
this.logger.debug(
|
||||
`recordAccess failed for ${baseId}: ${(err as Error).message}`,
|
||||
);
|
||||
}
|
||||
})();
|
||||
}
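  // Worked example (illustrative): with maxCollections = 10, maxKeep = 100, so
  // `zremrangebyrank base-query-cache:recent 0 -101` trims everything except
  // the 100 most recently scored members. Scores are Date.now() values, so a
  // warm-up pass can read the newest N baseIds with ZREVRANGE while the set
  // never grows unbounded.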
|
||||
|
||||
private tryGetRedisClient(): Redis | null {
|
||||
if (!this.redisService) return null;
|
||||
try {
|
||||
return this.redisService.getOrNil();
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}

function quoteIdent(name: string): string {
  return `"${name.replace(/"/g, '""')}"`;
}

/*
 * Convert a DuckDB row object back to the BaseRow JSON shape returned to
 * API callers. Kept inline (not exported) because it's a pure derivation
 * from the ColumnSpec list.
 */
function shapeBaseRow(
  raw: Record<string, unknown>,
  specs: ColumnSpec[],
): BaseRow {
  const cells: Record<string, unknown> = {};
  for (const spec of specs) {
    if (!spec.property) continue;
    const val = raw[spec.column];
    if (val == null) continue;
    if (spec.ddlType === 'JSON' && typeof val === 'string') {
      try {
        cells[spec.property.id] = JSON.parse(val);
      } catch {
        cells[spec.property.id] = val;
      }
    } else {
      cells[spec.property.id] = val;
    }
  }
  return {
    id: raw.id as string,
    baseId: raw.base_id as string,
    workspaceId: raw.workspace_id as string,
    creatorId: raw.creator_id as string,
    position: raw.position as string,
    createdAt: coerceDate(raw.created_at),
    updatedAt: coerceDate(raw.updated_at),
    lastUpdatedById: raw.last_updated_by_id as string,
    deletedAt: null,
    cells,
  } as BaseRow;
}

function coerceDate(v: unknown): Date {
  if (v instanceof Date) return v;
  if (typeof v === 'string') return new Date(v);
  return new Date(0);
}

function readFromRowEvent(
  row: Record<string, unknown>,
  spec: ColumnSpec,
): unknown {
  switch (spec.column) {
    case 'id': return row.id ?? null;
    case 'base_id': return row.baseId ?? row.base_id ?? null;
    case 'workspace_id': return row.workspaceId ?? row.workspace_id ?? null;
    case 'creator_id': return row.creatorId ?? row.creator_id ?? null;
    case 'position': return row.position ?? null;
    case 'created_at': return row.createdAt ?? row.created_at ?? null;
    case 'updated_at': return row.updatedAt ?? row.updated_at ?? null;
    case 'last_updated_by_id': return row.lastUpdatedById ?? row.last_updated_by_id ?? null;
    case 'deleted_at': return null;
    case 'search_text': return '';
  }
  const prop = spec.property;
  if (!prop) return null;
  if (
    prop.type === BasePropertyType.CREATED_AT ||
    prop.type === BasePropertyType.LAST_EDITED_AT ||
    prop.type === BasePropertyType.LAST_EDITED_BY
  ) {
    return null;
  }
  const cells = (row.cells as Record<string, unknown> | null) ?? {};
  return cells[prop.id] ?? null;
}
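// Illustrative example (values are made up): for a JSON column backing a
// multi-select property, shapeBaseRow parses the stored string back into an
// array while scalar columns pass through untouched:
//
//   raw:  { id: 'r1', base_id: 'b1', position: 'A1',
//           'prop-uuid': '["choice-a","choice-b"]' }
//   out:  { id: 'r1', baseId: 'b1', position: 'A1',
//           cells: { 'prop-uuid': ['choice-a', 'choice-b'] }, ... }
//
// Unparseable JSON falls back to the raw string rather than throwing.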
@@ -0,0 +1,110 @@
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
OnApplicationBootstrap,
|
||||
OnModuleDestroy,
|
||||
} from '@nestjs/common';
|
||||
import Redis from 'ioredis';
|
||||
import { EnvironmentService } from '../../../integrations/environment/environment.service';
|
||||
import {
|
||||
createRetryStrategy,
|
||||
parseRedisUrl,
|
||||
} from '../../../common/helpers/utils';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { BaseQueryCacheService } from './base-query-cache.service';
|
||||
import { ChangeEnvelope } from './query-cache.types';
|
||||
|
||||
const CHANNEL_PATTERN = 'base-query-cache:changes:*';
|
||||
|
||||
/*
|
||||
* Dedicated ioredis subscriber that forwards change envelopes to the local
|
||||
* BaseQueryCacheService. A separate connection is required because ioredis
|
||||
* puts subscribing clients into subscriber-only mode and the shared client
|
||||
* from RedisService is used for normal commands elsewhere in the app.
|
||||
* When the query-cache is disabled we do not open a Redis connection at all.
|
||||
*/
|
||||
@Injectable()
|
||||
export class BaseQueryCacheSubscriber
|
||||
implements OnApplicationBootstrap, OnModuleDestroy
|
||||
{
|
||||
private readonly logger = new Logger(BaseQueryCacheSubscriber.name);
|
||||
private client: Redis | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly configProvider: QueryCacheConfigProvider,
|
||||
private readonly env: EnvironmentService,
|
||||
private readonly cacheService: BaseQueryCacheService,
|
||||
) {}
|
||||
|
||||
async onApplicationBootstrap(): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
|
||||
const redisUrl = this.env.getRedisUrl();
|
||||
const { family } = parseRedisUrl(redisUrl);
|
||||
|
||||
this.client = new Redis(redisUrl, {
|
||||
family,
|
||||
retryStrategy: createRetryStrategy(),
|
||||
});
|
||||
|
||||
this.client.on('error', (err) => {
|
||||
this.logger.warn(`Subscriber client error: ${err.message}`);
|
||||
});
|
||||
|
||||
this.client.on('pmessage', (_pattern, channel, message) => {
|
||||
this.handleMessage(channel, message).catch((err) => {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`Unhandled error applying change from ${channel}: ${error.message}`,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
try {
|
||||
await this.client.psubscribe(CHANNEL_PATTERN);
|
||||
this.logger.log(`Subscribed to ${CHANNEL_PATTERN}`);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(`Failed to psubscribe: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
async onModuleDestroy(): Promise<void> {
|
||||
if (!this.client) return;
|
||||
try {
|
||||
await this.client.quit();
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`Failed to close subscriber client cleanly: ${error.message}`,
|
||||
);
|
||||
}
|
||||
this.client = null;
|
||||
}
|
||||
|
||||
private async handleMessage(
|
||||
channel: string,
|
||||
message: string,
|
||||
): Promise<void> {
|
||||
let envelope: ChangeEnvelope;
|
||||
try {
|
||||
envelope = JSON.parse(message) as ChangeEnvelope;
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`Dropping malformed cache-change message on ${channel}: ${error.message}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.cacheService.applyChange(envelope);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`applyChange failed for ${envelope.baseId}: ${error.message}`,
|
||||
);
|
||||
if (error.stack) this.logger.warn(error.stack);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,173 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { OnEvent } from '@nestjs/event-emitter';
|
||||
import { RedisService } from '@nestjs-labs/nestjs-ioredis';
|
||||
import type { Redis } from 'ioredis';
|
||||
import { EventName } from '../../../common/events/event.contants';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import {
|
||||
BasePropertyCreatedEvent,
|
||||
BasePropertyDeletedEvent,
|
||||
BasePropertyUpdatedEvent,
|
||||
BaseRowCreatedEvent,
|
||||
BaseRowDeletedEvent,
|
||||
BaseRowReorderedEvent,
|
||||
BaseRowUpdatedEvent,
|
||||
BaseRowsDeletedEvent,
|
||||
BaseSchemaBumpedEvent,
|
||||
} from '../events/base-events';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { ChangeEnvelope } from './query-cache.types';
|
||||
|
||||
/*
|
||||
* Bridges in-process base domain events onto a Redis pub/sub channel so every
|
||||
* node running the query-cache can keep its resident DuckDB collections in
|
||||
* sync. Each base gets its own channel (`base-query-cache:changes:${baseId}`)
|
||||
* to keep pattern matching cheap. When the feature flag is off this class
|
||||
* registers as a no-op so we pay zero overhead.
|
||||
*/
|
||||
@Injectable()
|
||||
export class BaseQueryCacheWriteConsumer {
|
||||
private readonly logger = new Logger(BaseQueryCacheWriteConsumer.name);
|
||||
private _redis: Redis | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly redisService: RedisService,
|
||||
private readonly configProvider: QueryCacheConfigProvider,
|
||||
private readonly baseRowRepo: BaseRowRepo,
|
||||
) {}
|
||||
|
||||
private get redis(): Redis {
|
||||
if (!this._redis) this._redis = this.redisService.getOrThrow();
|
||||
return this._redis;
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_CREATED)
|
||||
async onRowCreated(e: BaseRowCreatedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'row-upsert',
|
||||
baseId: e.baseId,
|
||||
row: e.row as unknown as Record<string, unknown>,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_UPDATED)
|
||||
async onRowUpdated(e: BaseRowUpdatedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
const row = await this.baseRowRepo.findById(e.rowId, {
|
||||
workspaceId: e.workspaceId,
|
||||
});
|
||||
if (!row) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'row-upsert',
|
||||
baseId: e.baseId,
|
||||
row: row as unknown as Record<string, unknown>,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_DELETED)
|
||||
async onRowDeleted(e: BaseRowDeletedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'row-delete',
|
||||
baseId: e.baseId,
|
||||
rowId: e.rowId,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROWS_DELETED)
|
||||
async onRowsDeleted(e: BaseRowsDeletedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'rows-delete',
|
||||
baseId: e.baseId,
|
||||
rowIds: e.rowIds,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_ROW_REORDERED)
|
||||
async onRowReordered(e: BaseRowReorderedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'row-reorder',
|
||||
baseId: e.baseId,
|
||||
rowId: e.rowId,
|
||||
position: e.position,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_SCHEMA_BUMPED)
|
||||
async onSchemaBumped(e: BaseSchemaBumpedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'schema-invalidate',
|
||||
baseId: e.baseId,
|
||||
schemaVersion: e.schemaVersion,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_UPDATED)
|
||||
async onPropertyUpdated(e: BasePropertyUpdatedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'schema-invalidate',
|
||||
baseId: e.baseId,
|
||||
schemaVersion: e.schemaVersion,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_CREATED)
|
||||
async onPropertyCreated(e: BasePropertyCreatedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
// Property CREATED / DELETED events don't carry a schemaVersion. Use
|
||||
// Number.MAX_SAFE_INTEGER as a sentinel so `applyChange`'s
|
||||
// `envVersion > cachedVersion` check unconditionally invalidates — any
|
||||
// real schemaVersion will be smaller. A follow-up could plumb the real
|
||||
// schemaVersion through the event payload and drop the sentinel.
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'schema-invalidate',
|
||||
baseId: e.baseId,
|
||||
schemaVersion: Number.MAX_SAFE_INTEGER,
|
||||
});
|
||||
}
|
||||
|
||||
@OnEvent(EventName.BASE_PROPERTY_DELETED)
|
||||
async onPropertyDeleted(e: BasePropertyDeletedEvent): Promise<void> {
|
||||
if (!this.configProvider.config.enabled) return;
|
||||
await this.publish(e.baseId, {
|
||||
kind: 'schema-invalidate',
|
||||
baseId: e.baseId,
|
||||
schemaVersion: Number.MAX_SAFE_INTEGER,
|
||||
});
|
||||
}
|
||||
|
||||
private async publish(
|
||||
baseId: string,
|
||||
envelope: ChangeEnvelope,
|
||||
): Promise<void> {
|
||||
const channel = `base-query-cache:changes:${baseId}`;
|
||||
if (this.configProvider.config.trace) {
|
||||
console.log(
|
||||
'[cache-trace]',
|
||||
JSON.stringify({
|
||||
phase: 'pubsub.publish',
|
||||
baseId,
|
||||
kind: envelope.kind,
|
||||
// Include the row id or similar short discriminator where meaningful,
|
||||
// but don't dump the full envelope — it can be large (row-upsert ships
|
||||
// the whole row).
|
||||
...('rowId' in envelope ? { rowId: envelope.rowId } : {}),
|
||||
...('rowIds' in envelope ? { rowCount: envelope.rowIds.length } : {}),
|
||||
}),
|
||||
);
|
||||
}
|
||||
try {
|
||||
await this.redis.publish(channel, JSON.stringify(envelope));
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`Failed to publish cache change for ${baseId}: ${error.message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
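// Illustrative round trip (wiring assumed from the channel convention above):
// the consumer publishes to a per-base channel and the subscriber on every
// node pattern-matches all of them, so only nodes with a resident collection
// do any work.
//
//   await redis.publish(
//     `base-query-cache:changes:${baseId}`,
//     JSON.stringify({ kind: 'row-delete', baseId, rowId }),
//   );
//   // elsewhere, once per node:
//   await subscriberClient.psubscribe('base-query-cache:changes:*');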
|
||||
@@ -0,0 +1,159 @@
|
||||
import { BaseQueryRouter } from './base-query-router';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BaseQueryCacheService } from './base-query-cache.service';
|
||||
import { FilterNode, SearchSpec, SortSpec } from '../engine';
|
||||
|
||||
type FakeConfig = { enabled: boolean; minRows: number };
|
||||
|
||||
function makeRouter(
|
||||
cfg: FakeConfig,
|
||||
count: number,
|
||||
): { router: BaseQueryRouter; countSpy: jest.Mock } {
|
||||
const configProvider = {
|
||||
config: {
|
||||
enabled: cfg.enabled,
|
||||
minRows: cfg.minRows,
|
||||
maxCollections: 10,
|
||||
warmTopN: 0,
|
||||
},
|
||||
} as unknown as QueryCacheConfigProvider;
|
||||
|
||||
const countSpy = jest.fn().mockResolvedValue(count);
|
||||
const baseRowRepo = { countActiveRows: countSpy } as unknown as BaseRowRepo;
|
||||
|
||||
// Default fake: always miss, so `decide` falls through to countActiveRows.
|
||||
const fakeCacheService = {
|
||||
peek: () => undefined,
|
||||
} as unknown as BaseQueryCacheService;
|
||||
|
||||
return {
|
||||
router: new BaseQueryRouter(configProvider, baseRowRepo, fakeCacheService),
|
||||
countSpy,
|
||||
};
|
||||
}
|
||||
|
||||
const filter: FilterNode = {
|
||||
op: 'and',
|
||||
children: [
|
||||
{
|
||||
propertyId: 'p1',
|
||||
op: 'eq',
|
||||
value: 'foo',
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const sorts: SortSpec[] = [{ propertyId: 'p1', direction: 'asc' }];
|
||||
|
||||
const trgmSearch: SearchSpec = { query: 'hello', mode: 'trgm' };
|
||||
const ftsSearch: SearchSpec = { query: 'hello', mode: 'fts' };
|
||||
|
||||
const baseArgs = {
|
||||
baseId: 'base-1',
|
||||
workspaceId: 'ws-1',
|
||||
};
|
||||
|
||||
describe('BaseQueryRouter.decide', () => {
|
||||
it('returns postgres when flag is off', async () => {
|
||||
const { router, countSpy } = makeRouter(
|
||||
{ enabled: false, minRows: 10 },
|
||||
1000,
|
||||
);
|
||||
const decision = await router.decide({ ...baseArgs, filter });
|
||||
expect(decision).toBe('postgres');
|
||||
expect(countSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns postgres when row count < minRows', async () => {
|
||||
const { router } = makeRouter({ enabled: true, minRows: 1000 }, 500);
|
||||
const decision = await router.decide({ ...baseArgs, filter });
|
||||
expect(decision).toBe('postgres');
|
||||
});
|
||||
|
||||
it('returns postgres when query has no filter/sort/search', async () => {
|
||||
const { router, countSpy } = makeRouter(
|
||||
{ enabled: true, minRows: 10 },
|
||||
10000,
|
||||
);
|
||||
const decision = await router.decide({ ...baseArgs });
|
||||
expect(decision).toBe('postgres');
|
||||
expect(countSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('returns postgres when search.mode === "fts" even for large base', async () => {
|
||||
const { router } = makeRouter({ enabled: true, minRows: 10 }, 10000);
|
||||
const decision = await router.decide({ ...baseArgs, search: ftsSearch });
|
||||
expect(decision).toBe('postgres');
|
||||
});
|
||||
|
||||
it('returns cache when flag on + rows >= minRows + has filter', async () => {
|
||||
const { router } = makeRouter({ enabled: true, minRows: 1000 }, 1000);
|
||||
const decision = await router.decide({ ...baseArgs, filter });
|
||||
expect(decision).toBe('cache');
|
||||
});
|
||||
|
||||
it('returns cache when flag on + rows >= minRows + has sort', async () => {
|
||||
const { router } = makeRouter({ enabled: true, minRows: 1000 }, 5000);
|
||||
const decision = await router.decide({ ...baseArgs, sorts });
|
||||
expect(decision).toBe('cache');
|
||||
});
|
||||
|
||||
it('returns postgres when flag on + rows >= minRows + has trgm search (v1 gates search to postgres)', async () => {
|
||||
const { router } = makeRouter({ enabled: true, minRows: 10 }, 10000);
|
||||
const decision = await router.decide({ ...baseArgs, search: trgmSearch });
|
||||
expect(decision).toBe('postgres');
|
||||
});
|
||||
|
||||
it('uses cached row count from resident collection (no Postgres call)', async () => {
|
||||
const countSpy = jest.fn().mockResolvedValue(999999); // shouldn't be called
|
||||
const cacheService = {
|
||||
peek: jest.fn().mockReturnValue({ baseId: 'base-1', rowCount: 50_000 }),
|
||||
} as unknown as BaseQueryCacheService;
|
||||
const router = new BaseQueryRouter(
|
||||
{
|
||||
config: {
|
||||
enabled: true,
|
||||
minRows: 25_000,
|
||||
maxCollections: 10,
|
||||
warmTopN: 0,
|
||||
},
|
||||
} as unknown as QueryCacheConfigProvider,
|
||||
{ countActiveRows: countSpy } as unknown as BaseRowRepo,
|
||||
cacheService,
|
||||
);
|
||||
const decision = await router.decide({
|
||||
...baseArgs,
|
||||
sorts,
|
||||
});
|
||||
expect(decision).toBe('cache');
|
||||
expect((cacheService.peek as jest.Mock)).toHaveBeenCalledWith('base-1');
|
||||
expect(countSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('falls back to Postgres count when collection is not resident', async () => {
|
||||
const countSpy = jest.fn().mockResolvedValue(30_000);
|
||||
const cacheService = {
|
||||
peek: jest.fn().mockReturnValue(undefined),
|
||||
} as unknown as BaseQueryCacheService;
|
||||
const router = new BaseQueryRouter(
|
||||
{
|
||||
config: {
|
||||
enabled: true,
|
||||
minRows: 25_000,
|
||||
maxCollections: 10,
|
||||
warmTopN: 0,
|
||||
},
|
||||
} as unknown as QueryCacheConfigProvider,
|
||||
{ countActiveRows: countSpy } as unknown as BaseRowRepo,
|
||||
cacheService,
|
||||
);
|
||||
const decision = await router.decide({
|
||||
...baseArgs,
|
||||
sorts,
|
||||
});
|
||||
expect(decision).toBe('cache');
|
||||
expect((cacheService.peek as jest.Mock)).toHaveBeenCalledWith('base-1');
|
||||
expect(countSpy).toHaveBeenCalledWith('base-1', { workspaceId: 'ws-1' });
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,118 @@
import { Injectable, Optional } from '@nestjs/common';
import { QueryCacheConfigProvider } from './query-cache.config';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import type { FilterNode, SearchSpec, SortSpec } from '../engine';
import { EnvironmentService } from '../../../integrations/environment/environment.service';
import { BaseQueryCacheService } from './base-query-cache.service';

export type RouteDecision = 'postgres' | 'cache';

export type RouteDecideArgs = {
  baseId: string;
  workspaceId: string;
  filter?: FilterNode;
  sorts?: SortSpec[];
  search?: SearchSpec;
};

@Injectable()
export class BaseQueryRouter {
  constructor(
    private readonly configProvider: QueryCacheConfigProvider,
    private readonly baseRowRepo: BaseRowRepo,
    private readonly cacheService: BaseQueryCacheService,
    @Optional() private readonly env: EnvironmentService | null = null,
  ) {}

  async decide(args: RouteDecideArgs): Promise<RouteDecision> {
    const { enabled, minRows } = this.configProvider.config;
    const trace = this.configProvider.config.trace ?? false;
    const debug = this.env?.getBaseQueryCacheDebug() ?? false;
    const tStart = debug ? Date.now() : 0;

    const emit = (route: RouteDecision, reason: string): RouteDecision => {
      if (trace) {
        console.log(
          '[cache-trace]',
          JSON.stringify({
            phase: 'router.decision',
            baseId: args.baseId,
            route,
            reason,
          }),
        );
      }
      return route;
    };

    if (!enabled) return emit('postgres', 'flag disabled');

    const hasFilter = !!args.filter;
    const hasSorts = !!args.sorts && args.sorts.length > 0;
    const hasSearch = !!args.search;
    if (!hasFilter && !hasSorts && !hasSearch) {
      return emit('postgres', 'no filter/sort/search');
    }

    // v1: any search stays on Postgres — loader doesn't populate search_text yet.
    if (hasSearch) return emit('postgres', 'search requires postgres');

    // Fast path: if the collection is already resident, read the cached
    // row count instead of running a Postgres COUNT on every request.
    const tPeek = debug ? Date.now() : 0;
    const resident = this.cacheService.peek(args.baseId);
    const peekMs = debug ? Date.now() - tPeek : 0;
    if (resident) {
      if (debug) {
        console.log(
          '[cache-perf]',
          JSON.stringify({
            phase: 'router.residentCount',
            baseId: args.baseId.slice(0, 8),
            count: resident.rowCount,
            minRows,
            ms: peekMs,
            totalMs: Date.now() - tStart,
          }),
        );
      }
      if (resident.rowCount < minRows) {
        return emit(
          'postgres',
          `rowCount=${resident.rowCount} below MIN_ROWS=${minRows}`,
        );
      }
      return emit(
        'cache',
        `qualified: rowCount=${resident.rowCount}, hasFilter=${hasFilter}, hasSort=${hasSorts}`,
      );
    }

    const tCount = debug ? Date.now() : 0;
    const count = await this.baseRowRepo.countActiveRows(args.baseId, {
      workspaceId: args.workspaceId,
    });
    if (debug) {
      console.log(
        '[cache-perf]',
        JSON.stringify({
          phase: 'router.countActiveRows',
          baseId: args.baseId.slice(0, 8),
          countMs: Date.now() - tCount,
          count,
          minRows,
          ms: Date.now() - tCount,
          totalMs: Date.now() - tStart,
        }),
      );
    }
    if (count < minRows) {
      return emit('postgres', `rowCount=${count} below MIN_ROWS=${minRows}`);
    }

    return emit(
      'cache',
      `qualified: rowCount=${count}, hasFilter=${hasFilter}, hasSort=${hasSorts}`,
    );
  }
}
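// Illustrative usage sketch (caller name and argument shapes are assumptions
// inferred from the surrounding code, not exact signatures): the router only
// picks an engine; it never runs the query itself.
//
//   const route = await router.decide({ baseId, workspaceId, filter, sorts });
//   const page =
//     route === 'cache'
//       ? await cacheService.list(baseId, workspaceId, { filter, sorts, pagination })
//       : await postgresEngine.list(baseId, { filter, sorts, pagination });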
@@ -0,0 +1,140 @@
|
||||
import { Injectable, Logger } from '@nestjs/common';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
import { buildColumnSpecs } from './column-types';
|
||||
import { buildLoaderSql } from './loader-sql';
|
||||
import { baseSchemaName } from './schema-name';
|
||||
import { DuckDbRuntime } from './duckdb-runtime';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { LoadedCollection } from './query-cache.types';
|
||||
|
||||
/*
|
||||
* Loads a base into the shared DuckDB runtime as an attached in-memory
|
||||
* database (`<schema>.rows`). Steps:
|
||||
*
|
||||
* 1. Attach a per-base schema.
|
||||
* 2. Run `CREATE TABLE <schema>.rows AS SELECT ... FROM postgres_query(...)`
|
||||
* via the writer connection — Postgres does the JSONB extraction.
|
||||
* 3. Declare `PRIMARY KEY (id)` on the new table.
|
||||
* 4. Build ART indexes on every indexable column.
|
||||
* 5. Count rows and return a LoadedCollection metadata record.
|
||||
*
|
||||
* Error path: detach the schema before propagating the error, so we don't
|
||||
* leak an empty attached DB into the runtime.
|
||||
*/
|
||||
@Injectable()
|
||||
export class CollectionLoader {
|
||||
private readonly logger = new Logger(CollectionLoader.name);
|
||||
|
||||
constructor(
|
||||
private readonly baseRepo: BaseRepo,
|
||||
private readonly basePropertyRepo: BasePropertyRepo,
|
||||
private readonly runtime: DuckDbRuntime,
|
||||
private readonly config: QueryCacheConfigProvider,
|
||||
) {}
|
||||
|
||||
async load(baseId: string, workspaceId: string): Promise<LoadedCollection> {
|
||||
if (!this.runtime.isReady()) {
|
||||
throw new Error(
|
||||
`Cannot load collection ${baseId}: duckdb runtime not ready. ` +
|
||||
`Check DuckDbRuntime bootstrap logs.`,
|
||||
);
|
||||
}
|
||||
|
||||
const base = await this.baseRepo.findById(baseId);
|
||||
if (!base) throw new Error(`Base ${baseId} not found`);
|
||||
const schemaVersion = (base as any).schemaVersion ?? 1;
|
||||
|
||||
const properties = await this.basePropertyRepo.findByBaseId(baseId);
|
||||
const specs = buildColumnSpecs(properties);
|
||||
const schema = baseSchemaName(baseId);
|
||||
|
||||
await this.runtime.attachBase(schema);
|
||||
|
||||
try {
|
||||
const writer = this.runtime.getWriter();
|
||||
|
||||
const sql = buildLoaderSql(specs, baseId, workspaceId, schema);
|
||||
if (this.config.config.trace) {
|
||||
console.log(
|
||||
'[cache-trace]',
|
||||
JSON.stringify({
|
||||
phase: 'loader.sql',
|
||||
baseId,
|
||||
schema,
|
||||
length: sql.length,
|
||||
sql,
|
||||
}),
|
||||
);
|
||||
}
|
||||
await writer.run(sql);
|
||||
|
||||
await writer.run(`ALTER TABLE ${schema}.rows ADD PRIMARY KEY (id)`);
|
||||
|
||||
for (const spec of specs) {
|
||||
if (!spec.indexable) continue;
|
||||
const safe = spec.column.replace(/[^a-zA-Z0-9_]/g, '_');
|
||||
const tIdx = this.config.config.trace ? Date.now() : 0;
|
||||
await writer.run(
|
||||
`CREATE INDEX ${schema}_${safe}_idx ON ${schema}.rows (${quoteIdent(spec.column)})`,
|
||||
);
|
||||
if (this.config.config.trace) {
|
||||
console.log(
|
||||
'[cache-trace]',
|
||||
JSON.stringify({
|
||||
phase: 'loader.index',
|
||||
baseId,
|
||||
schema,
|
||||
column: spec.column,
|
||||
ms: Date.now() - tIdx,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const countResult = await writer.runAndReadAll(
|
||||
`SELECT count(*) AS c FROM ${schema}.rows`,
|
||||
);
|
||||
const rowCount = Number(
|
||||
(countResult.getRowObjects()[0] as { c: bigint | number }).c,
|
||||
);
|
||||
|
||||
const approxBytes = estimateBytes(rowCount, specs.length);
|
||||
|
||||
this.logger.debug(
|
||||
`Loaded ${rowCount} rows for base ${baseId} ` +
|
||||
`(schemaVersion=${schemaVersion}, schema=${schema}, approxMB=${fmtMb(approxBytes)})`,
|
||||
);
|
||||
|
||||
return {
|
||||
baseId,
|
||||
schema,
|
||||
schemaVersion,
|
||||
columns: specs,
|
||||
lastAccessedAt: Date.now(),
|
||||
rowCount,
|
||||
approxBytes,
|
||||
};
|
||||
} catch (err) {
|
||||
try {
|
||||
await this.runtime.detachBase(schema);
|
||||
} catch { /* swallow */ }
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function estimateBytes(rowCount: number, columnCount: number): number {
|
||||
// Rough heuristic: ~64 bytes per cell (typed value + ART index entry
|
||||
// overhead). Within 2x of actual for typical schemas; used for
|
||||
// reporting only, not for eviction decisions.
|
||||
return rowCount * columnCount * 64;
|
||||
}
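// Worked example (illustrative): a base with 50,000 rows and 12 columns
// estimates to 50_000 * 12 * 64 B = 38_400_000 B, roughly 36.6 MiB, which is
// the approxMb value that residencySnapshot() reports.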
|
||||
|
||||
function fmtMb(bytes: number): string {
|
||||
return (bytes / (1024 * 1024)).toFixed(1);
|
||||
}
|
||||
|
||||
function quoteIdent(name: string): string {
|
||||
return `"${name.replace(/"/g, '""')}"`;
|
||||
}
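// Illustrative shape of the statements the loader runs (the exact SQL comes
// from buildLoaderSql; this sketch is an assumption, not a copy of it):
//
//   CREATE TABLE b_<hex>.rows AS
//     SELECT ... FROM postgres_query(..., 'SELECT id, position, cells ->> ''<propId>'' ...');
//   ALTER TABLE b_<hex>.rows ADD PRIMARY KEY (id);
//   CREATE INDEX b_<hex>_<col>_idx ON b_<hex>.rows ("<col>");
//
// Postgres does the JSONB extraction; DuckDB stores only typed columns and
// builds ART indexes on the indexable ones.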
|
||||
@@ -0,0 +1,76 @@
|
||||
import { BasePropertyType } from '../base.schemas';
|
||||
import { buildColumnSpecs, SYSTEM_COLUMNS } from './column-types';
|
||||
|
||||
const p = (type: string, extra: Record<string, unknown> = {}) => ({
|
||||
id: `prop-${type}`,
|
||||
type,
|
||||
typeOptions: extra,
|
||||
}) as any;
|
||||
|
||||
describe('buildColumnSpecs', () => {
|
||||
it('includes the fixed system columns first', () => {
|
||||
const specs = buildColumnSpecs([]);
|
||||
expect(specs.map((s) => s.column)).toEqual(SYSTEM_COLUMNS.map((s) => s.column));
|
||||
});
|
||||
|
||||
it('maps text / url / email to VARCHAR indexable', () => {
|
||||
for (const t of [BasePropertyType.TEXT, BasePropertyType.URL, BasePropertyType.EMAIL]) {
|
||||
const specs = buildColumnSpecs([p(t)]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('VARCHAR');
|
||||
expect(user.indexable).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('maps number to DOUBLE indexable', () => {
|
||||
const specs = buildColumnSpecs([p(BasePropertyType.NUMBER)]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('DOUBLE');
|
||||
expect(user.indexable).toBe(true);
|
||||
});
|
||||
|
||||
it('maps date to TIMESTAMPTZ indexable', () => {
|
||||
const specs = buildColumnSpecs([p(BasePropertyType.DATE)]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('TIMESTAMPTZ');
|
||||
expect(user.indexable).toBe(true);
|
||||
});
|
||||
|
||||
it('maps checkbox to BOOLEAN indexable', () => {
|
||||
const specs = buildColumnSpecs([p(BasePropertyType.CHECKBOX)]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('BOOLEAN');
|
||||
});
|
||||
|
||||
it('maps select / status to VARCHAR indexable', () => {
|
||||
for (const t of [BasePropertyType.SELECT, BasePropertyType.STATUS]) {
|
||||
const specs = buildColumnSpecs([p(t)]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('VARCHAR');
|
||||
expect(user.indexable).toBe(true);
|
||||
}
|
||||
});
|
||||
|
||||
it('maps multiSelect / file / multi-person to JSON non-indexable', () => {
|
||||
for (const t of [BasePropertyType.MULTI_SELECT, BasePropertyType.FILE]) {
|
||||
const specs = buildColumnSpecs([p(t)]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('JSON');
|
||||
expect(user.indexable).toBe(false);
|
||||
}
|
||||
const specs = buildColumnSpecs([p(BasePropertyType.PERSON, { allowMultiple: true })]);
|
||||
expect(specs[specs.length - 1].ddlType).toBe('JSON');
|
||||
});
|
||||
|
||||
it('maps single-person to VARCHAR indexable when allowMultiple=false', () => {
|
||||
const specs = buildColumnSpecs([p(BasePropertyType.PERSON, { allowMultiple: false })]);
|
||||
const user = specs[specs.length - 1];
|
||||
expect(user.ddlType).toBe('VARCHAR');
|
||||
expect(user.indexable).toBe(true);
|
||||
});
|
||||
|
||||
it('skips unknown property types', () => {
|
||||
const specs = buildColumnSpecs([p('unknown-type-x')]);
|
||||
expect(specs.length).toBe(SYSTEM_COLUMNS.length);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,63 @@
import { BasePropertyType, BasePropertyTypeValue } from '../base.schemas';
import { ColumnSpec } from './query-cache.types';
import type { BaseProperty } from '@docmost/db/types/entity.types';

export const SYSTEM_COLUMNS: ColumnSpec[] = [
  { column: 'id', ddlType: 'VARCHAR', indexable: false },
  { column: 'base_id', ddlType: 'VARCHAR', indexable: false },
  { column: 'workspace_id', ddlType: 'VARCHAR', indexable: false },
  { column: 'creator_id', ddlType: 'VARCHAR', indexable: false },
  { column: 'position', ddlType: 'VARCHAR', indexable: true },
  { column: 'created_at', ddlType: 'TIMESTAMPTZ', indexable: true },
  { column: 'updated_at', ddlType: 'TIMESTAMPTZ', indexable: true },
  { column: 'last_updated_by_id', ddlType: 'VARCHAR', indexable: true },
  { column: 'deleted_at', ddlType: 'TIMESTAMPTZ', indexable: false },
  { column: 'search_text', ddlType: 'VARCHAR', indexable: false },
];

type PropertyLike = Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>;

export function buildColumnSpecs(properties: PropertyLike[]): ColumnSpec[] {
  const out: ColumnSpec[] = [...SYSTEM_COLUMNS];
  for (const prop of properties) {
    const spec = buildUserColumn(prop);
    if (spec) out.push(spec);
  }
  return out;
}

function buildUserColumn(prop: PropertyLike): ColumnSpec | null {
  const t = prop.type as BasePropertyTypeValue;
  switch (t) {
    case BasePropertyType.TEXT:
    case BasePropertyType.URL:
    case BasePropertyType.EMAIL:
      return { column: prop.id, ddlType: 'VARCHAR', indexable: true, property: prop };
    case BasePropertyType.NUMBER:
      return { column: prop.id, ddlType: 'DOUBLE', indexable: true, property: prop };
    case BasePropertyType.DATE:
      return { column: prop.id, ddlType: 'TIMESTAMPTZ', indexable: true, property: prop };
    case BasePropertyType.CHECKBOX:
      return { column: prop.id, ddlType: 'BOOLEAN', indexable: true, property: prop };
    case BasePropertyType.SELECT:
    case BasePropertyType.STATUS:
      return { column: prop.id, ddlType: 'VARCHAR', indexable: true, property: prop };
    case BasePropertyType.MULTI_SELECT:
    case BasePropertyType.FILE:
      return { column: prop.id, ddlType: 'JSON', indexable: false, property: prop };
    case BasePropertyType.PERSON: {
      const allowMultiple = !!(prop.typeOptions as any)?.allowMultiple;
      return allowMultiple
        ? { column: prop.id, ddlType: 'JSON', indexable: false, property: prop }
        : { column: prop.id, ddlType: 'VARCHAR', indexable: true, property: prop };
    }
    // System types are modelled as system columns on base_rows — do not add
    // a per-property column for them. They're already in SYSTEM_COLUMNS.
    case BasePropertyType.CREATED_AT:
    case BasePropertyType.LAST_EDITED_AT:
    case BasePropertyType.LAST_EDITED_BY:
      return null;
    default:
      return null;
  }
}
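// Illustrative example (property ids are made up): for one number property
// and one multi-select property, buildColumnSpecs returns the ten
// SYSTEM_COLUMNS followed by two user columns:
//
//   buildColumnSpecs([
//     { id: 'p-num', type: BasePropertyType.NUMBER, typeOptions: {} },
//     { id: 'p-tags', type: BasePropertyType.MULTI_SELECT, typeOptions: {} },
//   ]);
//   // ...system columns...,
//   // { column: 'p-num',  ddlType: 'DOUBLE', indexable: true,  property: {...} },
//   // { column: 'p-tags', ddlType: 'JSON',   indexable: false, property: {...} }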
@@ -0,0 +1,75 @@
|
||||
import { ConnectionPool } from './connection-pool';
|
||||
|
||||
describe('ConnectionPool', () => {
|
||||
it('hands out an available resource immediately', async () => {
|
||||
const pool = new ConnectionPool<string>();
|
||||
pool.init(['a', 'b']);
|
||||
expect(await pool.acquire()).toBe('b');
|
||||
expect(await pool.acquire()).toBe('a');
|
||||
});
|
||||
|
||||
it('a waiter is resolved by the next release', async () => {
|
||||
const pool = new ConnectionPool<string>();
|
||||
pool.init(['only']);
|
||||
const first = await pool.acquire();
|
||||
let resolved: string | null = null;
|
||||
const secondP = pool.acquire().then((v) => (resolved = v));
|
||||
expect(resolved).toBeNull();
|
||||
pool.release(first);
|
||||
await secondP;
|
||||
expect(resolved).toBe('only');
|
||||
});
|
||||
|
||||
it('FIFO among waiters (fair under contention)', async () => {
|
||||
const pool = new ConnectionPool<string>();
|
||||
pool.init(['only']);
|
||||
const held = await pool.acquire();
|
||||
|
||||
const order: number[] = [];
|
||||
const p1 = pool.acquire().then(() => order.push(1));
|
||||
const p2 = pool.acquire().then(() => order.push(2));
|
||||
const p3 = pool.acquire().then(() => order.push(3));
|
||||
|
||||
pool.release(held);
|
||||
await p1;
|
||||
pool.release('only'); // re-release the value that p1 got (simulated)
|
||||
await p2;
|
||||
pool.release('only');
|
||||
await p3;
|
||||
|
||||
expect(order).toEqual([1, 2, 3]);
|
||||
});
|
||||
|
||||
it('withResource acquires, invokes callback, and releases even on throw', async () => {
|
||||
const pool = new ConnectionPool<string>();
|
||||
pool.init(['one']);
|
||||
let called = false;
|
||||
await expect(
|
||||
pool.withResource(async (v) => {
|
||||
called = true;
|
||||
expect(v).toBe('one');
|
||||
throw new Error('boom');
|
||||
}),
|
||||
).rejects.toThrow('boom');
|
||||
expect(called).toBe(true);
|
||||
// resource should be back in the pool
|
||||
expect(await pool.acquire()).toBe('one');
|
||||
});
|
||||
|
||||
it('size() reports the initial count regardless of check-outs', () => {
|
||||
const pool = new ConnectionPool<string>();
|
||||
pool.init(['a', 'b', 'c']);
|
||||
expect(pool.size()).toBe(3);
|
||||
});
|
||||
|
||||
it('close() returns all held resources and rejects pending waiters', async () => {
|
||||
const pool = new ConnectionPool<string>();
|
||||
pool.init(['only']);
|
||||
const first = await pool.acquire();
|
||||
const pending = pool.acquire();
|
||||
pending.catch(() => {}); // Attach catch to prevent unhandled rejection
|
||||
const closed = pool.close();
|
||||
expect(closed).toEqual([]); // No free resources (one is checked out)
|
||||
await expect(pending).rejects.toThrow(/closed/i);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,86 @@
type Waiter<T> = {
  resolve: (value: T) => void;
  reject: (err: Error) => void;
};

/*
 * A minimal async resource pool. No external deps. Semantics:
 *
 * - `acquire()` returns an available resource immediately, or a Promise
 *   that resolves when one is released.
 * - `release(r)` returns a resource. If there are pending waiters, hands
 *   to the FIFO-first one. Otherwise returns to the free list.
 * - `withResource(fn)` acquires, invokes, and releases — releases even
 *   if `fn` throws.
 * - `close()` rejects all pending waiters and returns the currently-free
 *   resources so the owner can release them. Already-checked-out
 *   resources remain the caller's responsibility to finish with and
 *   re-release; once the pool is closed, that release becomes a no-op.
 *
 * Initial size is set via `init(resources)`. Resources must not be checked
 * out before `init` is called. `size()` reports the canonical count (does
 * not decrement on acquire).
 */
export class ConnectionPool<T> {
  private free: T[] = [];
  private waiters: Waiter<T>[] = [];
  private initialCount = 0;
  private closed = false;

  init(resources: T[]): void {
    if (this.initialCount !== 0) {
      throw new Error('ConnectionPool already initialised');
    }
    this.free = [...resources];
    this.initialCount = resources.length;
  }

  size(): number {
    return this.initialCount;
  }

  async acquire(): Promise<T> {
    if (this.closed) {
      throw new Error('ConnectionPool is closed');
    }
    if (this.free.length > 0) {
      return this.free.pop()!;
    }
    return new Promise<T>((resolve, reject) => {
      this.waiters.push({ resolve, reject });
    });
  }

  release(resource: T): void {
    if (this.closed) {
      // Drop; caller expected this
      return;
    }
    const waiter = this.waiters.shift();
    if (waiter) {
      waiter.resolve(resource);
    } else {
      this.free.push(resource);
    }
  }

  async withResource<R>(fn: (resource: T) => Promise<R>): Promise<R> {
    const resource = await this.acquire();
    try {
      return await fn(resource);
    } finally {
      this.release(resource);
    }
  }

  close(): T[] {
    this.closed = true;
    for (const waiter of this.waiters) {
      waiter.reject(new Error('ConnectionPool is closed'));
    }
    this.waiters = [];
    const remaining = this.free;
    this.free = [];
    return remaining;
  }
}
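// Illustrative usage sketch (the connection type name is an assumption; the
// pool itself is generic over any resource):
//
//   const pool = new ConnectionPool<DuckDBConnection>();
//   pool.init([connA, connB]);
//   const rows = await pool.withResource(async (conn) => {
//     const prepared = await conn.prepare('SELECT 42 AS answer');
//     const reader = await prepared.runAndReadAll();
//     return reader.getRowObjectsJS();
//   });
//   // conn is returned to the pool even if the callback throws.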
@@ -0,0 +1,183 @@
|
||||
import { buildColumnSpecs } from './column-types';
|
||||
import { buildDuckDbListQuery } from './duckdb-query-builder';
|
||||
import { BasePropertyType } from '../base.schemas';
|
||||
|
||||
const SCHEMA = 'b_019c69a3dd4770148b87ec8f1675aaaa';
|
||||
|
||||
const numericProp = {
|
||||
id: '00000000-0000-0000-0000-000000000001',
|
||||
type: BasePropertyType.NUMBER,
|
||||
typeOptions: {},
|
||||
} as any;
|
||||
const textProp = {
|
||||
id: '00000000-0000-0000-0000-000000000002',
|
||||
type: BasePropertyType.TEXT,
|
||||
typeOptions: {},
|
||||
} as any;
|
||||
|
||||
const columns = buildColumnSpecs([numericProp, textProp]);
|
||||
|
||||
describe('buildDuckDbListQuery', () => {
|
||||
it('renders no-filter, no-sort, no-search as live-rows-paginated-by-position', () => {
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
pagination: { limit: 100 },
|
||||
});
|
||||
expect(sql).toContain(`FROM ${SCHEMA}.rows`);
|
||||
expect(sql).toMatch(/deleted_at IS NULL/);
|
||||
expect(sql).toMatch(/ORDER BY position ASC, id ASC/);
|
||||
expect(sql).toMatch(/LIMIT 101/);
|
||||
expect(params).toEqual([]);
|
||||
});
|
||||
|
||||
it('renders numeric gt filter with parameterized value', () => {
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
filter: {
|
||||
op: 'and',
|
||||
children: [{ propertyId: numericProp.id, op: 'gt', value: 42 }],
|
||||
},
|
||||
pagination: { limit: 100 },
|
||||
});
|
||||
expect(sql).toMatch(new RegExp(`"${numericProp.id}" > \\?`));
|
||||
expect(params).toContain(42);
|
||||
});
|
||||
|
||||
it('renders text contains with ILIKE and escaped wildcards', () => {
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
filter: {
|
||||
op: 'and',
|
||||
children: [{ propertyId: textProp.id, op: 'contains', value: 'a_b%c' }],
|
||||
},
|
||||
pagination: { limit: 100 },
|
||||
});
|
||||
expect(sql).toMatch(/ILIKE \?/);
|
||||
expect(params).toContain('%a\\_b\\%c%');
|
||||
});
|
||||
|
||||
it('renders sort with sentinel wrapping and cursor keyset', () => {
|
||||
const { sql } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
sorts: [{ propertyId: numericProp.id, direction: 'asc' }],
|
||||
pagination: {
|
||||
limit: 50,
|
||||
afterKeys: { s0: 10, position: 'A0', id: '00000000-0000-0000-0000-0000000000aa' },
|
||||
},
|
||||
});
|
||||
expect(sql).toMatch(/COALESCE\("[0-9a-f-]+", '?[Ii]nfinity'?::[A-Z]+\) AS s0/);
|
||||
expect(sql).toMatch(/ORDER BY s0 ASC, position ASC, id ASC/);
|
||||
// keyset OR-chain
|
||||
expect(sql).toMatch(/s0 > \?/);
|
||||
});
|
||||
|
||||
it('renders search in trgm mode as ILIKE on search_text', () => {
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
search: { mode: 'trgm', query: 'hello' },
|
||||
pagination: { limit: 10 },
|
||||
});
|
||||
expect(sql).toMatch(/search_text ILIKE \?/);
|
||||
expect(params).toContain('%hello%');
|
||||
});
|
||||
|
||||
it('renders multi-select any filter with json_contains and to_json binding', () => {
|
||||
const multiProp = {
|
||||
id: '00000000-0000-0000-0000-000000000010',
|
||||
type: BasePropertyType.MULTI_SELECT,
|
||||
typeOptions: {},
|
||||
} as any;
|
||||
const cols = buildColumnSpecs([multiProp]);
|
||||
const choiceA = 'choice-uuid-aaa';
|
||||
const choiceB = 'choice-uuid-bbb';
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns: cols,
|
||||
filter: {
|
||||
op: 'and',
|
||||
children: [{ propertyId: multiProp.id, op: 'any', value: [choiceA, choiceB] }],
|
||||
},
|
||||
pagination: { limit: 100 },
|
||||
});
|
||||
expect(sql).toMatch(/json_contains\("[0-9a-f-]+", to_json\(\?\)\)/);
|
||||
expect(sql).not.toMatch(/json_array_contains/);
|
||||
expect(params).toContain(choiceA);
|
||||
expect(params).toContain(choiceB);
|
||||
});
|
||||
|
||||
it('renders nested AND/OR groups with correct parentheses', () => {
|
||||
const { sql } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
filter: {
|
||||
op: 'or',
|
||||
children: [
|
||||
{ op: 'and', children: [{ propertyId: numericProp.id, op: 'gt', value: 1 }] },
|
||||
{ op: 'and', children: [{ propertyId: textProp.id, op: 'eq', value: 'x' }] },
|
||||
],
|
||||
},
|
||||
pagination: { limit: 100 },
|
||||
});
|
||||
expect(sql).toMatch(/\(\(.+\) OR \(.+\)\)/);
|
||||
});
|
||||
|
||||
it('handles empty filter group without emitting WHERE on it', () => {
|
||||
const { sql, params } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
filter: { op: 'and', children: [] },
|
||||
pagination: { limit: 100 },
|
||||
});
|
||||
// either WHERE clause elided entirely, or group becomes TRUE
|
||||
expect(sql).toMatch(/deleted_at IS NULL/);
|
||||
expect(params).toEqual([]);
|
||||
});
|
||||
|
||||
it('renders multi-sort keyset with s0, s1, position, id chain', () => {
|
||||
const { sql } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
sorts: [
|
||||
{ propertyId: numericProp.id, direction: 'asc' },
|
||||
{ propertyId: textProp.id, direction: 'desc' },
|
||||
],
|
||||
pagination: {
|
||||
limit: 10,
|
||||
afterKeys: { s0: 10, s1: 'abc', position: 'A0', id: '00000000-0000-0000-0000-0000000000aa' },
|
||||
},
|
||||
});
|
||||
expect(sql).toMatch(/AS s0/);
|
||||
expect(sql).toMatch(/AS s1/);
|
||||
expect(sql).toMatch(/ORDER BY s0 ASC, s1 DESC, position ASC, id ASC/);
|
||||
expect(sql).toMatch(/s0 > \?/);
|
||||
expect(sql).toMatch(/s1 < \?/); // desc → less-than
|
||||
});
|
||||
|
||||
it('renders text isEmpty as IS NULL OR = empty-string', () => {
|
||||
const { sql } = buildDuckDbListQuery({
|
||||
schema: SCHEMA,
|
||||
columns,
|
||||
filter: {
|
||||
op: 'and',
|
||||
children: [{ propertyId: textProp.id, op: 'isEmpty' }],
|
||||
},
|
||||
pagination: { limit: 10 },
|
||||
});
|
||||
expect(sql).toMatch(new RegExp(`"${textProp.id}" IS NULL`));
|
||||
});
|
||||
|
||||
it('rejects invalid schema name', () => {
|
||||
expect(() =>
|
||||
buildDuckDbListQuery({
|
||||
schema: 'bad name',
|
||||
columns: [],
|
||||
pagination: { limit: 10 },
|
||||
}),
|
||||
).toThrow(/invalid schema/i);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,637 @@
|
||||
import { BasePropertyType } from '../base.schemas';
|
||||
import {
|
||||
Condition,
|
||||
FilterNode,
|
||||
SearchSpec,
|
||||
SortSpec,
|
||||
} from '../engine/schema.zod';
|
||||
import { escapeIlike } from '../engine/extractors';
|
||||
import { PropertyKind, propertyKind } from '../engine/kinds';
|
||||
import { ColumnSpec } from './query-cache.types';
|
||||
|
||||
export type AfterKeys = Record<string, unknown>;
|
||||
|
||||
export type DuckDbListQueryOpts = {
|
||||
schema: string;
|
||||
columns: ColumnSpec[];
|
||||
filter?: FilterNode;
|
||||
sorts?: SortSpec[];
|
||||
search?: SearchSpec;
|
||||
pagination: { limit: number; afterKeys?: AfterKeys };
|
||||
};
|
||||
|
||||
export type DuckDbListQuery = {
|
||||
sql: string;
|
||||
params: unknown[];
|
||||
};
|
||||
|
||||
export class FtsNotSupportedInCache extends Error {
|
||||
constructor() {
|
||||
super('FTS search mode is not supported in the DuckDB query cache');
|
||||
this.name = 'FtsNotSupportedInCache';
|
||||
}
|
||||
}
|
||||
|
||||
type ColumnIndex = {
|
||||
byId: Map<string, ColumnSpec>;
|
||||
userColumns: ColumnSpec[];
|
||||
};
|
||||
|
||||
type SortBuild = {
|
||||
key: string;
|
||||
expression: string;
|
||||
direction: 'asc' | 'desc';
|
||||
};
|
||||
|
||||
// System property type → DuckDB system column name. Mirrors
|
||||
// engine/kinds.SYSTEM_COLUMN but in snake_case (DuckDB table uses
|
||||
// snake_case columns; the engine relies on Kysely's camel-case plugin).
|
||||
const SYSTEM_COLUMN_DUCK: Record<string, 'created_at' | 'updated_at' | 'last_updated_by_id'> = {
|
||||
[BasePropertyType.CREATED_AT]: 'created_at',
|
||||
[BasePropertyType.LAST_EDITED_AT]: 'updated_at',
|
||||
[BasePropertyType.LAST_EDITED_BY]: 'last_updated_by_id',
|
||||
};
|
||||
|
||||
export function buildDuckDbListQuery(
|
||||
opts: DuckDbListQueryOpts,
|
||||
): DuckDbListQuery {
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(opts.schema)) {
|
||||
throw new Error(`Invalid schema name "${opts.schema}"`);
|
||||
}
|
||||
const rowsTable = `${opts.schema}.rows`;
|
||||
const index = indexColumns(opts.columns);
|
||||
const params: unknown[] = [];
|
||||
|
||||
const whereClauses: string[] = ['deleted_at IS NULL'];
|
||||
|
||||
if (opts.search) {
|
||||
whereClauses.push(buildSearch(opts.search, params));
|
||||
}
|
||||
|
||||
if (opts.filter) {
|
||||
const filterSql = buildFilter(opts.filter, index, params);
|
||||
if (filterSql) whereClauses.push(filterSql);
|
||||
}
|
||||
|
||||
const sortBuilds = buildSorts(opts.sorts ?? [], index);
|
||||
|
||||
const selectParts: string[] = buildSelect(index, sortBuilds);
|
||||
|
||||
if (opts.pagination.afterKeys) {
|
||||
whereClauses.push(
|
||||
buildKeyset(opts.pagination.afterKeys, sortBuilds, params),
|
||||
);
|
||||
}
|
||||
|
||||
const orderByParts: string[] = [
|
||||
...sortBuilds.map((s) => `${s.key} ${s.direction.toUpperCase()}`),
|
||||
'position ASC',
|
||||
'id ASC',
|
||||
];
|
||||
|
||||
const sql =
|
||||
`SELECT ${selectParts.join(', ')}` +
|
||||
` FROM ${rowsTable}` +
|
||||
` WHERE ${whereClauses.join(' AND ')}` +
|
||||
` ORDER BY ${orderByParts.join(', ')}` +
|
||||
` LIMIT ${opts.pagination.limit + 1}`;
|
||||
|
||||
return { sql, params };
|
||||
}
|
||||
|
||||
// --- select projection -------------------------------------------------
|
||||
|
||||
function buildSelect(index: ColumnIndex, sortBuilds: SortBuild[]): string[] {
|
||||
const parts: string[] = [
|
||||
'id',
|
||||
'base_id',
|
||||
'position',
|
||||
'creator_id',
|
||||
'last_updated_by_id',
|
||||
'workspace_id',
|
||||
'created_at',
|
||||
'updated_at',
|
||||
'deleted_at',
|
||||
];
|
||||
for (const col of index.userColumns) {
|
||||
parts.push(quoteIdent(col.column));
|
||||
}
|
||||
for (const sb of sortBuilds) {
|
||||
parts.push(`${sb.expression} AS ${sb.key}`);
|
||||
}
|
||||
return parts;
|
||||
}
|
||||
|
||||
// --- filter ------------------------------------------------------------
|
||||
|
||||
function buildFilter(
|
||||
node: FilterNode,
|
||||
index: ColumnIndex,
|
||||
params: unknown[],
|
||||
): string {
|
||||
if ('children' in node) {
|
||||
if (node.children.length === 0) return 'TRUE';
|
||||
const built = node.children
|
||||
.map((c) => buildFilter(c, index, params))
|
||||
.filter((s) => s.length > 0);
|
||||
if (built.length === 0) return 'TRUE';
|
||||
const joiner = node.op === 'and' ? ' AND ' : ' OR ';
|
||||
return `(${built.join(joiner)})`;
|
||||
}
|
||||
return buildCondition(node, index, params);
|
||||
}
|
||||
|
||||
function buildCondition(
|
||||
cond: Condition,
|
||||
index: ColumnIndex,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const col = index.byId.get(cond.propertyId);
|
||||
if (!col) return 'FALSE';
|
||||
|
||||
const propType = col.property?.type;
|
||||
if (propType && SYSTEM_COLUMN_DUCK[propType]) {
|
||||
return systemCondition(SYSTEM_COLUMN_DUCK[propType], cond, params);
|
||||
}
|
||||
|
||||
const kind = propType ? propertyKind(propType) : null;
|
||||
if (!kind) return 'FALSE';
|
||||
|
||||
const colRef = quoteIdent(col.column);
|
||||
|
||||
switch (kind) {
|
||||
case PropertyKind.TEXT:
|
||||
return textCondition(colRef, cond, params);
|
||||
case PropertyKind.NUMERIC:
|
||||
return numericCondition(colRef, cond, params);
|
||||
case PropertyKind.DATE:
|
||||
return dateCondition(colRef, cond, params);
|
||||
case PropertyKind.BOOL:
|
||||
return boolCondition(colRef, cond, params);
|
||||
case PropertyKind.SELECT:
|
||||
return selectCondition(colRef, cond, params);
|
||||
case PropertyKind.MULTI:
|
||||
return arrayOfIdsCondition(colRef, cond, params);
|
||||
case PropertyKind.PERSON: {
|
||||
const allowMultiple = !!(col.property?.typeOptions as any)?.allowMultiple;
|
||||
return allowMultiple
|
||||
? arrayOfIdsCondition(colRef, cond, params)
|
||||
: selectCondition(colRef, cond, params);
|
||||
}
|
||||
case PropertyKind.FILE:
|
||||
return arrayOfIdsCondition(colRef, cond, params);
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
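
// Note on the negative operators below: `neq` and `ncontains` include an
// explicit `IS NULL OR ...` leg so rows whose cell is unset still match a
// negative filter instead of being dropped by SQL three-valued logic.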
function textCondition(
|
||||
colRef: string,
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const val = cond.value;
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `(${colRef} IS NULL OR ${colRef} = '')`;
|
||||
case 'isNotEmpty':
|
||||
return `(${colRef} IS NOT NULL AND ${colRef} != '')`;
|
||||
case 'eq':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${colRef} = ?`;
|
||||
case 'neq':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `(${colRef} IS NULL OR ${colRef} != ?)`;
|
||||
case 'contains':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(`%${escapeIlike(String(val))}%`);
|
||||
return `${colRef} ILIKE ?`;
|
||||
case 'ncontains':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(`%${escapeIlike(String(val))}%`);
|
||||
return `(${colRef} IS NULL OR ${colRef} NOT ILIKE ?)`;
|
||||
case 'startsWith':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(`${escapeIlike(String(val))}%`);
|
||||
return `${colRef} ILIKE ?`;
|
||||
case 'endsWith':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(`%${escapeIlike(String(val))}`);
|
||||
return `${colRef} ILIKE ?`;
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
function numericCondition(
|
||||
colRef: string,
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const raw = cond.value;
|
||||
const num = raw == null ? null : Number(raw);
|
||||
const bad = num == null || Number.isNaN(num);
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `${colRef} IS NULL`;
|
||||
case 'isNotEmpty':
|
||||
return `${colRef} IS NOT NULL`;
|
||||
case 'eq':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(num);
|
||||
return `${colRef} = ?`;
|
||||
case 'neq':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(num);
|
||||
return `(${colRef} IS NULL OR ${colRef} != ?)`;
|
||||
case 'gt':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(num);
|
||||
return `${colRef} > ?`;
|
||||
case 'gte':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(num);
|
||||
return `${colRef} >= ?`;
|
||||
case 'lt':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(num);
|
||||
return `${colRef} < ?`;
|
||||
case 'lte':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(num);
|
||||
return `${colRef} <= ?`;
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
function dateCondition(
|
||||
colRef: string,
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const raw = cond.value;
|
||||
const bad = raw == null || raw === '';
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `${colRef} IS NULL`;
|
||||
case 'isNotEmpty':
|
||||
return `${colRef} IS NOT NULL`;
|
||||
case 'eq':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(raw));
|
||||
return `${colRef} = ?`;
|
||||
case 'neq':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(raw));
|
||||
return `(${colRef} IS NULL OR ${colRef} != ?)`;
|
||||
case 'before':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(raw));
|
||||
return `${colRef} < ?`;
|
||||
case 'after':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(raw));
|
||||
return `${colRef} > ?`;
|
||||
case 'onOrBefore':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(raw));
|
||||
return `${colRef} <= ?`;
|
||||
case 'onOrAfter':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(raw));
|
||||
return `${colRef} >= ?`;
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
function boolCondition(
|
||||
colRef: string,
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `${colRef} IS NULL`;
|
||||
case 'isNotEmpty':
|
||||
return `${colRef} IS NOT NULL`;
|
||||
case 'eq':
|
||||
if (cond.value == null) return 'FALSE';
|
||||
params.push(Boolean(cond.value));
|
||||
return `${colRef} = ?`;
|
||||
case 'neq':
|
||||
if (cond.value == null) return 'FALSE';
|
||||
params.push(Boolean(cond.value));
|
||||
return `(${colRef} IS NULL OR ${colRef} != ?)`;
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
function selectCondition(
|
||||
colRef: string,
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const val = cond.value;
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `(${colRef} IS NULL OR ${colRef} = '')`;
|
||||
case 'isNotEmpty':
|
||||
return `(${colRef} IS NOT NULL AND ${colRef} != '')`;
|
||||
case 'eq':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${colRef} = ?`;
|
||||
case 'neq':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `(${colRef} IS NULL OR ${colRef} != ?)`;
|
||||
case 'any': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'FALSE';
|
||||
const placeholders = arr.map(() => '?').join(', ');
|
||||
for (const v of arr) params.push(v);
|
||||
return `${colRef} IN (${placeholders})`;
|
||||
}
|
||||
case 'none': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'TRUE';
|
||||
const placeholders = arr.map(() => '?').join(', ');
|
||||
for (const v of arr) params.push(v);
|
||||
return `(${colRef} IS NULL OR ${colRef} NOT IN (${placeholders}))`;
|
||||
}
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
function arrayOfIdsCondition(
|
||||
colRef: string,
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const val = cond.value;
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `(${colRef} IS NULL OR json_array_length(${colRef}) = 0)`;
|
||||
case 'isNotEmpty':
|
||||
return `(${colRef} IS NOT NULL AND json_array_length(${colRef}) > 0)`;
|
||||
case 'any': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'FALSE';
|
||||
const legs = arr.map(() => jsonArrayContains(colRef, '?'));
|
||||
for (const v of arr) params.push(v);
|
||||
return `(${legs.join(' OR ')})`;
|
||||
}
|
||||
case 'all': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'TRUE';
|
||||
const legs = arr.map(() => jsonArrayContains(colRef, '?'));
|
||||
for (const v of arr) params.push(v);
|
||||
return `(${legs.join(' AND ')})`;
|
||||
}
|
||||
case 'none': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'TRUE';
|
||||
const legs = arr.map(() => jsonArrayContains(colRef, '?'));
|
||||
for (const v of arr) params.push(v);
|
||||
return `(${colRef} IS NULL OR NOT (${legs.join(' OR ')}))`;
|
||||
}
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
function systemCondition(
|
||||
column: 'created_at' | 'updated_at' | 'last_updated_by_id',
|
||||
cond: Condition,
|
||||
params: unknown[],
|
||||
): string {
|
||||
const val = cond.value;
|
||||
|
||||
if (column === 'last_updated_by_id') {
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return `${column} IS NULL`;
|
||||
case 'isNotEmpty':
|
||||
return `${column} IS NOT NULL`;
|
||||
case 'eq':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} = ?`;
|
||||
case 'neq':
|
||||
if (val == null) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `(${column} IS NULL OR ${column} != ?)`;
|
||||
case 'any': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'FALSE';
|
||||
const placeholders = arr.map(() => '?').join(', ');
|
||||
for (const v of arr) params.push(v);
|
||||
return `${column} IN (${placeholders})`;
|
||||
}
|
||||
case 'none': {
|
||||
const arr = asStringArray(val);
|
||||
if (arr.length === 0) return 'TRUE';
|
||||
const placeholders = arr.map(() => '?').join(', ');
|
||||
for (const v of arr) params.push(v);
|
||||
return `(${column} IS NULL OR ${column} NOT IN (${placeholders}))`;
|
||||
}
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
const bad = val == null || val === '';
|
||||
switch (cond.op) {
|
||||
case 'isEmpty':
|
||||
return 'FALSE';
|
||||
case 'isNotEmpty':
|
||||
return 'TRUE';
|
||||
case 'eq':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} = ?`;
|
||||
case 'neq':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} != ?`;
|
||||
case 'before':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} < ?`;
|
||||
case 'after':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} > ?`;
|
||||
case 'onOrBefore':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} <= ?`;
|
||||
case 'onOrAfter':
|
||||
if (bad) return 'FALSE';
|
||||
params.push(String(val));
|
||||
return `${column} >= ?`;
|
||||
default:
|
||||
return 'FALSE';
|
||||
}
|
||||
}
|
||||
|
||||
// --- sort --------------------------------------------------------------
|
||||
|
||||
function buildSorts(sorts: SortSpec[], index: ColumnIndex): SortBuild[] {
|
||||
const out: SortBuild[] = [];
|
||||
for (let i = 0; i < sorts.length; i++) {
|
||||
const s = sorts[i];
|
||||
const col = index.byId.get(s.propertyId);
|
||||
if (!col) continue;
|
||||
const key = `s${i}`;
|
||||
|
||||
const propType = col.property?.type;
|
||||
const sys = propType ? SYSTEM_COLUMN_DUCK[propType] : undefined;
|
||||
if (sys) {
|
||||
out.push({ key, expression: sys, direction: s.direction });
|
||||
continue;
|
||||
}
|
||||
|
||||
const kind = propType ? propertyKind(propType) : null;
|
||||
if (!kind) continue;
|
||||
|
||||
out.push(wrapWithSentinel(col.column, kind, s.direction, key));
|
||||
}
|
||||
return out;
|
||||
}
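
// NULL handling in sort keys: instead of NULLS FIRST/LAST, each user sort
// expression is COALESCE'd with a direction-dependent sentinel (for ASC:
// +Infinity, max timestamp, TRUE, or the max code point), so rows with an
// unset cell land at the end of the ordering and ties fall back to
// position/id.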
function wrapWithSentinel(
|
||||
column: string,
|
||||
kind: ReturnType<typeof propertyKind>,
|
||||
direction: 'asc' | 'desc',
|
||||
key: string,
|
||||
): SortBuild {
|
||||
const colRef = quoteIdent(column);
|
||||
let sentinel: string;
|
||||
if (kind === PropertyKind.NUMERIC) {
|
||||
sentinel = direction === 'asc' ? `'Infinity'::DOUBLE` : `'-Infinity'::DOUBLE`;
|
||||
} else if (kind === PropertyKind.DATE) {
|
||||
sentinel =
|
||||
direction === 'asc'
|
||||
? `'9999-12-31 23:59:59+00'::TIMESTAMPTZ`
|
||||
: `'0001-01-01 00:00:00+00'::TIMESTAMPTZ`;
|
||||
} else if (kind === PropertyKind.BOOL) {
|
||||
sentinel = direction === 'asc' ? 'TRUE' : 'FALSE';
|
||||
} else {
|
||||
// TEXT / SELECT / MULTI / PERSON / FILE — sort by the column's raw text
|
||||
// representation; JSON-typed list columns will stringify in DuckDB
|
||||
// lexicographically, matching the Postgres engine's text extractor.
|
||||
sentinel = direction === 'asc' ? 'CHR(1114111)' : `''`;
|
||||
}
|
||||
return {
|
||||
key,
|
||||
expression: `COALESCE(${colRef}, ${sentinel})`,
|
||||
direction,
|
||||
};
|
||||
}

// --- search ------------------------------------------------------------

function buildSearch(spec: SearchSpec, params: unknown[]): string {
  const q = spec.query.trim();
  if (!q) return 'TRUE';
  if (spec.mode === 'fts') {
    throw new FtsNotSupportedInCache();
  }
  params.push(`%${escapeIlike(q)}%`);
  return `search_text ILIKE ?`;
}

// --- keyset ------------------------------------------------------------
|
||||
|
||||
function buildKeyset(
|
||||
afterKeys: AfterKeys,
|
||||
sortBuilds: SortBuild[],
|
||||
params: unknown[],
|
||||
): string {
|
||||
// Keys in the same order as ORDER BY: s0..sN, then position, then id.
|
||||
// Mirrors cursor-pagination.ts `applyCursor`: builds the lexicographic
|
||||
// OR-chain from tail to head, wrapping each step as
|
||||
// `(fi > v) OR (fi = v AND <tail>)`.
|
||||
//
|
||||
// Param binding is positional (plain `?` placeholders bound in order). They appear
|
||||
// left-to-right in the final SQL as: leg0(head), leg0(tie), leg1(head),
|
||||
// leg1(tie), ..., legN(head). We therefore collect the per-leg params
|
||||
// first, then flatten in head→tail order at the end.
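//
// Worked example for a single ASC user sort (legs s0, position, id):
//
//   (s0 > ? OR (s0 = ? AND (position > ? OR (position = ? AND id > ?))))
//
// with params flattened head→tail: [s0, s0, position, position, id].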
type Leg = { key: string; expression: string; direction: 'asc' | 'desc' };
|
||||
const legs: Leg[] = [
|
||||
...sortBuilds.map((s) => ({
|
||||
key: s.key,
|
||||
expression: s.key,
|
||||
direction: s.direction,
|
||||
})),
|
||||
{ key: 'position', expression: 'position', direction: 'asc' },
|
||||
{ key: 'id', expression: 'id', direction: 'asc' },
|
||||
];
|
||||
|
||||
// Skip legs whose key is absent from afterKeys (shouldn't happen for
|
||||
// well-formed cursors, but keeps the builder defensive).
|
||||
const usable = legs.filter((l) => l.key in afterKeys);
|
||||
if (usable.length === 0) return 'TRUE';
|
||||
|
||||
// legParams[i] = [value, value?] — one push for the head `>` or `<`,
|
||||
// one more push for the tie `=` on every leg except the last.
|
||||
const legParams: unknown[][] = [];
|
||||
let expr = '';
|
||||
for (let i = usable.length - 1; i >= 0; i--) {
|
||||
const leg = usable[i];
|
||||
const value = afterKeys[leg.key];
|
||||
const cmp = leg.direction === 'asc' ? '>' : '<';
|
||||
|
||||
const head = `${leg.expression} ${cmp} ?`;
|
||||
|
||||
if (!expr) {
|
||||
legParams[i] = [value];
|
||||
expr = head;
|
||||
continue;
|
||||
}
|
||||
legParams[i] = [value, value];
|
||||
const tie = `${leg.expression} = ?`;
|
||||
expr = `(${head} OR (${tie} AND ${expr}))`;
|
||||
}
|
||||
|
||||
// Flatten legs in head→tail (placeholder) order.
|
||||
for (const values of legParams) {
|
||||
for (const v of values) params.push(v);
|
||||
}
|
||||
return expr;
|
||||
}

// --- utilities ---------------------------------------------------------

function indexColumns(columns: ColumnSpec[]): ColumnIndex {
  const byId = new Map<string, ColumnSpec>();
  const userColumns: ColumnSpec[] = [];
  for (const c of columns) {
    if (c.property) {
      byId.set(c.property.id, c);
      userColumns.push(c);
    }
  }
  return { byId, userColumns };
}

function quoteIdent(name: string): string {
  return `"${name.replace(/"/g, '""')}"`;
}

function jsonArrayContains(colRef: string, paramPlaceholder: string): string {
  return `json_contains(${colRef}, to_json(${paramPlaceholder}))`;
}

function asStringArray(val: unknown): string[] {
  if (val == null) return [];
  if (Array.isArray(val)) return val.filter((v) => v != null).map(String);
  return [String(val)];
}
@@ -0,0 +1,117 @@
|
||||
import { DuckDbRuntime } from './duckdb-runtime';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
|
||||
const makeConfig = (
|
||||
overrides: Partial<QueryCacheConfigProvider['config']> = {},
|
||||
): QueryCacheConfigProvider =>
|
||||
({
|
||||
config: {
|
||||
enabled: true,
|
||||
minRows: 25_000,
|
||||
maxCollections: 50,
|
||||
warmTopN: 50,
|
||||
memoryLimit: '256MB',
|
||||
threads: 2,
|
||||
tempDirectory: `${require('node:os').tmpdir()}/docmost-duckdb-runtime-test`,
|
||||
trace: false,
|
||||
readerPoolSize: 2,
|
||||
...overrides,
|
||||
},
|
||||
}) as unknown as QueryCacheConfigProvider;
|
||||
|
||||
const makeEnv = (): { getDatabaseURL: () => string } => ({
|
||||
getDatabaseURL: () => process.env.DATABASE_URL ?? '',
|
||||
});
|
||||
|
||||
describe('DuckDbRuntime', () => {
|
||||
it('no-ops when the cache is disabled', async () => {
|
||||
const rt = new DuckDbRuntime(makeConfig({ enabled: false }), makeEnv() as any);
|
||||
await rt.onApplicationBootstrap();
|
||||
expect(rt.isReady()).toBe(false);
|
||||
await rt.onModuleDestroy();
|
||||
});
|
||||
|
||||
it('bootstraps instance, extension, PG attach, and reader pool', async () => {
|
||||
const rt = new DuckDbRuntime(makeConfig(), makeEnv() as any);
|
||||
await rt.onApplicationBootstrap();
|
||||
expect(rt.isReady()).toBe(true);
|
||||
expect(rt.readerPoolSize()).toBe(2);
|
||||
await rt.onModuleDestroy();
|
||||
});
|
||||
|
||||
it('attachBase creates a per-base schema and detachBase removes it', async () => {
|
||||
const rt = new DuckDbRuntime(makeConfig(), makeEnv() as any);
|
||||
await rt.onApplicationBootstrap();
|
||||
try {
|
||||
const schema = 'b_testaaaaaaaaaaaaaaaaaaaaaaaaaa';
|
||||
await rt.attachBase(schema);
|
||||
await rt.getWriter().run(`CREATE TABLE ${schema}.t (x INTEGER)`);
|
||||
await rt.getWriter().run(`INSERT INTO ${schema}.t VALUES (1), (2), (3)`);
|
||||
const res = await rt
|
||||
.getWriter()
|
||||
.runAndReadAll(`SELECT count(*) AS c FROM ${schema}.t`);
|
||||
const row = res.getRowObjects()[0] as { c: bigint | number };
|
||||
expect(Number(row.c)).toBe(3);
|
||||
|
||||
await rt.detachBase(schema);
|
||||
await expect(
|
||||
rt.getWriter().run(`SELECT count(*) FROM ${schema}.t`),
|
||||
).rejects.toThrow();
|
||||
} finally {
|
||||
await rt.onModuleDestroy();
|
||||
}
|
||||
});
|
||||
|
||||
it('withReader parallelises across pool', async () => {
|
||||
const rt = new DuckDbRuntime(makeConfig({ readerPoolSize: 2 }), makeEnv() as any);
|
||||
await rt.onApplicationBootstrap();
|
||||
try {
|
||||
const started: string[] = [];
|
||||
const ended: string[] = [];
|
||||
const p1 = rt.withReader(async (conn) => {
|
||||
started.push('a');
|
||||
await new Promise((r) => setTimeout(r, 50));
|
||||
await conn.runAndReadAll('SELECT 1');
|
||||
ended.push('a');
|
||||
});
|
||||
const p2 = rt.withReader(async (conn) => {
|
||||
started.push('b');
|
||||
await new Promise((r) => setTimeout(r, 50));
|
||||
await conn.runAndReadAll('SELECT 1');
|
||||
ended.push('b');
|
||||
});
|
||||
await Promise.all([p1, p2]);
|
||||
expect(new Set(started)).toEqual(new Set(['a', 'b']));
|
||||
expect(started.length).toBe(2);
|
||||
expect(ended.length).toBe(2);
|
||||
} finally {
|
||||
await rt.onModuleDestroy();
|
||||
}
|
||||
});
|
||||
|
||||
it('withReader on a 3rd concurrent request with pool=2 queues correctly', async () => {
|
||||
const rt = new DuckDbRuntime(makeConfig({ readerPoolSize: 2 }), makeEnv() as any);
|
||||
await rt.onApplicationBootstrap();
|
||||
try {
|
||||
const order: number[] = [];
|
||||
const makeOne = (n: number, delayMs: number) =>
|
||||
rt.withReader(async () => {
|
||||
await new Promise((r) => setTimeout(r, delayMs));
|
||||
order.push(n);
|
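
// Rough shape of the generated statement (illustrative, not verbatim):
//
//   SELECT id, base_id, ..., "<prop-uuid>", <sort expr> AS s0
//   FROM <schema>.rows
//   WHERE deleted_at IS NULL [AND <search>] [AND <filter>] [AND <keyset>]
//   ORDER BY s0 ASC, position ASC, id ASC
//   LIMIT <limit + 1>  -- one extra row so the caller can detect another page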
const p1 = makeOne(1, 40);
|
||||
const p2 = makeOne(2, 40);
|
||||
const p3 = makeOne(3, 5);
|
||||
await Promise.all([p1, p2, p3]);
|
||||
expect(order.length).toBe(3);
|
||||
expect(order.indexOf(3)).toBeGreaterThan(0);
|
||||
} finally {
|
||||
await rt.onModuleDestroy();
|
||||
}
|
||||
});
|
||||
|
||||
it('getWriter throws if not ready', () => {
|
||||
const rt = new DuckDbRuntime(makeConfig(), makeEnv() as any);
|
||||
expect(() => rt.getWriter()).toThrow(/not ready/i);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,211 @@
|
||||
import {
|
||||
Injectable,
|
||||
Logger,
|
||||
OnApplicationBootstrap,
|
||||
OnModuleDestroy,
|
||||
} from '@nestjs/common';
|
||||
import { DuckDBInstance, DuckDBConnection } from '@duckdb/node-api';
|
||||
import * as fs from 'node:fs';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { EnvironmentService } from '../../../integrations/environment/environment.service';
|
||||
import { ConnectionPool } from './connection-pool';
|
||||
|
||||
/*
|
||||
* DuckDbRuntime
|
||||
* -------------
|
||||
* Owns the process-wide DuckDB instance and everything attached to it:
|
||||
*
|
||||
* - One `DuckDBInstance` at `:memory:` with `memory_limit`, `threads`,
|
||||
* `temp_directory` configured from env.
|
||||
* - One writer `DuckDBConnection` for ATTACH/DETACH/CREATE TABLE/INSERT.
|
||||
* - A pool of N reader connections for SELECTs; `withReader(fn)` lends
|
||||
* one out, runs the callback, returns it — fair FIFO under contention.
|
||||
* - The `postgres` extension is installed + loaded once, not per-base.
|
||||
* - A single long-lived ATTACH against Postgres (READ_ONLY). All loaders
|
||||
* reference `postgres_query('pg', $pgsql$ ... $pgsql$)` without doing
|
||||
* their own attach/detach.
|
||||
*
|
||||
* When the query cache is disabled (`config.enabled === false`), the
|
||||
* runtime is a no-op: nothing is created, `isReady()` returns false, and
|
||||
* every consumer's own gate prevents it from touching the runtime.
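*
* Minimal usage sketch, composed from the methods defined below:
*
*   await runtime.attachBase(schema);
*   const result = await runtime.withReader((conn) =>
*     conn.runAndReadAll(`SELECT count(*) FROM ${schema}.rows`),
*   );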
*/
|
||||
@Injectable()
|
||||
export class DuckDbRuntime implements OnApplicationBootstrap, OnModuleDestroy {
|
||||
private readonly logger = new Logger(DuckDbRuntime.name);
|
||||
private instance: DuckDBInstance | null = null;
|
||||
private writer: DuckDBConnection | null = null;
|
||||
private readonly readerPool = new ConnectionPool<DuckDBConnection>();
|
||||
private readonly attachedSchemas = new Set<string>();
|
||||
private ready = false;
|
||||
private bootstrapFailure: string | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly configProvider: QueryCacheConfigProvider,
|
||||
private readonly env: EnvironmentService,
|
||||
) {}
|
||||
|
||||
async onApplicationBootstrap(): Promise<void> {
|
||||
const config = this.configProvider.config;
|
||||
if (!config.enabled) {
|
||||
this.logger.log('query cache disabled; skipping duckdb runtime bootstrap');
|
||||
return;
|
||||
}
|
||||
|
||||
const dbUrl = this.env.getDatabaseURL();
|
||||
if (!dbUrl) {
|
||||
this.bootstrapFailure = 'DATABASE_URL is empty';
|
||||
this.logger.error('DuckDbRuntime cannot bootstrap: DATABASE_URL is empty');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.mkdirSync(config.tempDirectory, { recursive: true });
|
||||
} catch {
|
||||
/* swallow */
|
||||
}
|
||||
|
||||
try {
|
||||
this.instance = await DuckDBInstance.create(':memory:', {
|
||||
memory_limit: config.memoryLimit,
|
||||
threads: String(config.threads),
|
||||
temp_directory: config.tempDirectory,
|
||||
});
|
||||
|
||||
this.writer = await this.instance.connect();
|
||||
await this.writer.run('SET preserve_insertion_order = false');
|
||||
await this.writer.run('INSTALL postgres');
|
||||
await this.writer.run('LOAD postgres');
|
||||
await this.writer.run(
|
||||
`ATTACH ${escapeSqlString(dbUrl)} AS pg (TYPE POSTGRES, READ_ONLY)`,
|
||||
);
|
||||
|
||||
const readers: DuckDBConnection[] = [];
|
||||
for (let i = 0; i < config.readerPoolSize; i++) {
|
||||
const reader = await this.instance.connect();
|
||||
await reader.run('SET preserve_insertion_order = false');
|
||||
readers.push(reader);
|
||||
}
|
||||
this.readerPool.init(readers);
|
||||
|
||||
this.ready = true;
|
||||
this.logger.log(
|
||||
`DuckDbRuntime ready (readers=${config.readerPoolSize}, memory_limit=${config.memoryLimit})`,
|
||||
);
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.bootstrapFailure = error.message;
|
||||
this.logger.error(`DuckDbRuntime bootstrap failed: ${error.message}`);
|
||||
if (error.stack) this.logger.error(error.stack);
|
||||
this.ready = false;
|
||||
try {
|
||||
this.readerPool.close().forEach((c) => c.closeSync());
|
||||
} catch { /* swallow */ }
|
||||
try {
|
||||
this.writer?.closeSync();
|
||||
} catch { /* swallow */ }
|
||||
try {
|
||||
this.instance?.closeSync();
|
||||
} catch { /* swallow */ }
|
||||
this.writer = null;
|
||||
this.instance = null;
|
||||
}
|
||||
}
|
||||
|
||||
async onModuleDestroy(): Promise<void> {
|
||||
for (const c of this.readerPool.close()) {
|
||||
try {
|
||||
c.closeSync();
|
||||
} catch { /* swallow */ }
|
||||
}
|
||||
if (this.writer) {
|
||||
try {
|
||||
this.writer.closeSync();
|
||||
} catch { /* swallow */ }
|
||||
this.writer = null;
|
||||
}
|
||||
if (this.instance) {
|
||||
try {
|
||||
this.instance.closeSync();
|
||||
} catch { /* swallow */ }
|
||||
this.instance = null;
|
||||
}
|
||||
this.attachedSchemas.clear();
|
||||
this.ready = false;
|
||||
}
|
||||
|
||||
isReady(): boolean {
|
||||
return this.ready;
|
||||
}
|
||||
|
||||
readerPoolSize(): number {
|
||||
return this.readerPool.size();
|
||||
}
|
||||
|
||||
lastBootstrapFailure(): string | null {
|
||||
return this.bootstrapFailure;
|
||||
}
|
||||
|
||||
/*
|
||||
* Attach a new in-memory database for a base. Idempotent: if the schema
|
||||
* is already attached, this is a no-op. Schema name must come from
|
||||
* `baseSchemaName()` — validated by the caller; we check shape here
|
||||
* as defense-in-depth.
|
||||
*/
|
||||
async attachBase(schema: string): Promise<void> {
|
||||
this.requireReady();
|
||||
this.requireSchemaShape(schema);
|
||||
if (this.attachedSchemas.has(schema)) return;
|
||||
|
||||
await this.writer!.run(`ATTACH ':memory:' AS ${schema}`);
|
||||
this.attachedSchemas.add(schema);
|
||||
}
|
||||
|
||||
/*
|
||||
* Detach an in-memory database. Idempotent: detaching a non-attached
|
||||
* schema is a no-op. Frees all memory held by the attached DB back
|
||||
* to the shared buffer pool.
|
||||
*/
|
||||
async detachBase(schema: string): Promise<void> {
|
||||
if (!this.ready || !this.writer) return;
|
||||
this.requireSchemaShape(schema);
|
||||
if (!this.attachedSchemas.has(schema)) return;
|
||||
|
||||
try {
|
||||
await this.writer.run(`DETACH DATABASE ${schema}`);
|
||||
} catch (err) {
|
||||
const msg = (err as Error).message ?? '';
|
||||
if (!/not attached|does not exist|unknown database/i.test(msg)) {
|
||||
throw err;
|
||||
}
|
||||
} finally {
|
||||
this.attachedSchemas.delete(schema);
|
||||
}
|
||||
}
|
||||
|
||||
getWriter(): DuckDBConnection {
|
||||
this.requireReady();
|
||||
return this.writer!;
|
||||
}
|
||||
|
||||
async withReader<T>(fn: (conn: DuckDBConnection) => Promise<T>): Promise<T> {
|
||||
this.requireReady();
|
||||
return this.readerPool.withResource(fn);
|
||||
}
|
||||
|
||||
private requireReady(): void {
|
||||
if (!this.ready || !this.writer) {
|
||||
const detail = this.bootstrapFailure ? `: ${this.bootstrapFailure}` : '';
|
||||
throw new Error(`DuckDbRuntime not ready${detail}`);
|
||||
}
|
||||
}
|
||||
|
||||
private requireSchemaShape(schema: string): void {
|
||||
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(schema)) {
|
||||
throw new Error(`Invalid schema name "${schema}"`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function escapeSqlString(s: string): string {
|
||||
return `'${s.replace(/'/g, "''")}'`;
|
||||
}
|
||||
@@ -0,0 +1,154 @@
|
||||
import { buildLoaderSql } from './loader-sql';
|
||||
import { ColumnSpec } from './query-cache.types';
|
||||
import { BasePropertyType } from '../base.schemas';
|
||||
|
||||
const BASE_ID = '019c69a3-dd47-7014-8b87-ec8f1675aaaa';
|
||||
const WORKSPACE_ID = '019c69a3-dd47-7014-8b87-ec8f1675bbbb';
|
||||
const SCHEMA = 'b_019c69a3dd4770148b87ec8f1675aaaa';
|
||||
|
||||
const sys: ColumnSpec[] = [
|
||||
{ column: 'id', ddlType: 'VARCHAR', indexable: false },
|
||||
{ column: 'base_id', ddlType: 'VARCHAR', indexable: false },
|
||||
{ column: 'workspace_id', ddlType: 'VARCHAR', indexable: false },
|
||||
{ column: 'creator_id', ddlType: 'VARCHAR', indexable: false },
|
||||
{ column: 'position', ddlType: 'VARCHAR', indexable: true },
|
||||
{ column: 'created_at', ddlType: 'TIMESTAMPTZ', indexable: true },
|
||||
{ column: 'updated_at', ddlType: 'TIMESTAMPTZ', indexable: true },
|
||||
{ column: 'last_updated_by_id', ddlType: 'VARCHAR', indexable: true },
|
||||
{ column: 'deleted_at', ddlType: 'TIMESTAMPTZ', indexable: false },
|
||||
{ column: 'search_text', ddlType: 'VARCHAR', indexable: false },
|
||||
];
|
||||
|
||||
const makeProp = (
|
||||
id: string,
|
||||
type: (typeof BasePropertyType)[keyof typeof BasePropertyType],
|
||||
): ColumnSpec['property'] => ({ id, type, typeOptions: null } as any);
|
||||
|
||||
describe('buildLoaderSql', () => {
|
||||
it('creates schema-qualified rows table and wraps the SELECT in postgres_query', () => {
|
||||
const sql = buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, SCHEMA);
|
||||
expect(sql).toContain(`CREATE TABLE ${SCHEMA}.rows AS`);
|
||||
expect(sql).toContain("SELECT * FROM postgres_query('pg', $pgsql$");
|
||||
expect(sql).toContain('FROM base_rows');
|
||||
expect(sql).toContain(`WHERE base_id = '${BASE_ID}'::uuid`);
|
||||
expect(sql).toContain(`AND workspace_id = '${WORKSPACE_ID}'::uuid`);
|
||||
expect(sql).toContain('AND deleted_at IS NULL');
|
||||
expect(sql).toContain('$pgsql$)');
|
||||
});
|
||||
|
||||
it('projects system columns verbatim inside the inner SELECT', () => {
|
||||
const sql = buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, SCHEMA);
|
||||
expect(sql).toContain('id::text AS id');
|
||||
expect(sql).toContain('base_id::text AS base_id');
|
||||
expect(sql).toContain('position');
|
||||
expect(sql).toContain("''::VARCHAR AS search_text");
|
||||
});
|
||||
|
||||
it('maps TEXT -> base_cell_text with schema-qualified alias', () => {
|
||||
const prop = makeProp('019c69a3-dd47-7014-8b87-ec8f167577aa', BasePropertyType.TEXT);
|
||||
const sql = buildLoaderSql(
|
||||
[...sys, { column: prop!.id, ddlType: 'VARCHAR', indexable: true, property: prop }],
|
||||
BASE_ID,
|
||||
WORKSPACE_ID,
|
||||
SCHEMA,
|
||||
);
|
||||
expect(sql).toContain(
|
||||
`base_cell_text(cells, '019c69a3-dd47-7014-8b87-ec8f167577aa'::uuid) AS "019c69a3-dd47-7014-8b87-ec8f167577aa"`,
|
||||
);
|
||||
});
|
||||
|
||||
it('maps NUMBER -> base_cell_numeric', () => {
|
||||
const prop = makeProp('019c69a3-dd47-7014-8b87-ec8f167577bb', BasePropertyType.NUMBER);
|
||||
const sql = buildLoaderSql(
|
||||
[...sys, { column: prop!.id, ddlType: 'DOUBLE', indexable: true, property: prop }],
|
||||
BASE_ID,
|
||||
WORKSPACE_ID,
|
||||
SCHEMA,
|
||||
);
|
||||
expect(sql).toContain(
|
||||
`base_cell_numeric(cells, '019c69a3-dd47-7014-8b87-ec8f167577bb'::uuid) AS "019c69a3-dd47-7014-8b87-ec8f167577bb"`,
|
||||
);
|
||||
});
|
||||
|
||||
it('maps DATE -> base_cell_timestamptz', () => {
|
||||
const prop = makeProp('019c69a3-dd47-7014-8b87-ec8f167577cc', BasePropertyType.DATE);
|
||||
const sql = buildLoaderSql(
|
||||
[...sys, { column: prop!.id, ddlType: 'TIMESTAMPTZ', indexable: true, property: prop }],
|
||||
BASE_ID,
|
||||
WORKSPACE_ID,
|
||||
SCHEMA,
|
||||
);
|
||||
expect(sql).toContain(
|
||||
`base_cell_timestamptz(cells, '019c69a3-dd47-7014-8b87-ec8f167577cc'::uuid) AS "019c69a3-dd47-7014-8b87-ec8f167577cc"`,
|
||||
);
|
||||
});
|
||||
|
||||
it('maps CHECKBOX -> base_cell_bool', () => {
|
||||
const prop = makeProp('019c69a3-dd47-7014-8b87-ec8f167577dd', BasePropertyType.CHECKBOX);
|
||||
const sql = buildLoaderSql(
|
||||
[...sys, { column: prop!.id, ddlType: 'BOOLEAN', indexable: true, property: prop }],
|
||||
BASE_ID,
|
||||
WORKSPACE_ID,
|
||||
SCHEMA,
|
||||
);
|
||||
expect(sql).toContain(
|
||||
`base_cell_bool(cells, '019c69a3-dd47-7014-8b87-ec8f167577dd'::uuid) AS "019c69a3-dd47-7014-8b87-ec8f167577dd"`,
|
||||
);
|
||||
});
|
||||
|
||||
it('maps MULTI_SELECT (JSON) -> raw jsonb cast to text', () => {
|
||||
const prop = makeProp('019c69a3-dd47-7014-8b87-ec8f167577ee', BasePropertyType.MULTI_SELECT);
|
||||
const sql = buildLoaderSql(
|
||||
[...sys, { column: prop!.id, ddlType: 'JSON', indexable: false, property: prop }],
|
||||
BASE_ID,
|
||||
WORKSPACE_ID,
|
||||
SCHEMA,
|
||||
);
|
||||
expect(sql).toContain(
|
||||
`(cells -> '019c69a3-dd47-7014-8b87-ec8f167577ee')::text AS "019c69a3-dd47-7014-8b87-ec8f167577ee"`,
|
||||
);
|
||||
});
|
||||
|
||||
it('rejects invalid column names', () => {
|
||||
const bad: ColumnSpec = {
|
||||
column: 'pwned"; DROP TABLE rows; --',
|
||||
ddlType: 'VARCHAR',
|
||||
indexable: false,
|
||||
};
|
||||
expect(() => buildLoaderSql([bad], BASE_ID, WORKSPACE_ID, SCHEMA)).toThrow(
|
||||
/invalid column name/i,
|
||||
);
|
||||
});
|
||||
|
||||
it('rejects non-UUID property ids', () => {
|
||||
const badProp = { id: 'not-a-uuid', type: BasePropertyType.TEXT, typeOptions: null } as any;
|
||||
expect(() =>
|
||||
buildLoaderSql(
|
||||
[{ column: 'some-uuid-col', ddlType: 'VARCHAR', indexable: true, property: badProp }],
|
||||
BASE_ID,
|
||||
WORKSPACE_ID,
|
||||
SCHEMA,
|
||||
),
|
||||
).toThrow(/invalid property uuid/i);
|
||||
});
|
||||
|
||||
it('rejects invalid base id', () => {
|
||||
expect(() => buildLoaderSql(sys, 'not-a-uuid', WORKSPACE_ID, SCHEMA)).toThrow(/invalid base id/i);
|
||||
});
|
||||
|
||||
it('rejects invalid workspace id', () => {
|
||||
expect(() => buildLoaderSql(sys, BASE_ID, 'not-a-uuid', SCHEMA)).toThrow(/invalid workspace id/i);
|
||||
});
|
||||
|
||||
it('rejects invalid schema name', () => {
|
||||
expect(() => buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, 'bad name')).toThrow(/invalid schema/i);
|
||||
expect(() => buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, '1starts_with_digit')).toThrow(/invalid schema/i);
|
||||
expect(() => buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, '')).toThrow(/invalid schema/i);
|
||||
});
|
||||
|
||||
it('is deterministic', () => {
|
||||
expect(buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, SCHEMA)).toEqual(
|
||||
buildLoaderSql(sys, BASE_ID, WORKSPACE_ID, SCHEMA),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,110 @@
|
||||
import { ColumnSpec } from './query-cache.types';
|
||||
|
||||
/*
|
||||
* Pure SQL builder for the cold-load query executed against the process-wide
|
||||
* DuckDB instance. The resulting SQL creates `<schema>.rows` inside the
|
||||
* attached in-memory database for the base, populated from Postgres via the
|
||||
* `postgres_query` function:
|
||||
*
|
||||
* CREATE TABLE <schema>.rows AS
|
||||
* SELECT * FROM postgres_query('pg', $pgsql$ ... $pgsql$);
|
||||
*
|
||||
* The inner SQL uses the Postgres helper functions (`base_cell_text`,
|
||||
* `base_cell_numeric`, `base_cell_timestamptz`, `base_cell_bool`) so JSONB
|
||||
* extraction happens server-side.
|
||||
*
|
||||
* Callers must pass a validated `schema` name (use `baseSchemaName()`).
|
||||
* Schema, baseId, and workspaceId are interpolated after validation: schema
|
||||
* is regex-checked and baseId/workspaceId are UUID-validated.
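*
* For example, a TEXT property is projected inside the inner SELECT as
*
*   base_cell_text(cells, '<property-uuid>'::uuid) AS "<property-uuid>"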
*/
|
||||
export function buildLoaderSql(
|
||||
specs: ColumnSpec[],
|
||||
baseId: string,
|
||||
workspaceId: string,
|
||||
schema: string,
|
||||
): string {
|
||||
if (!UUID.test(baseId)) {
|
||||
throw new Error(`Invalid base id "${baseId}"`);
|
||||
}
|
||||
if (!UUID.test(workspaceId)) {
|
||||
throw new Error(`Invalid workspace id "${workspaceId}"`);
|
||||
}
|
||||
validateSchema(schema);
|
||||
|
||||
const projections = specs.map((spec) => projectionFor(spec));
|
||||
return [
|
||||
`CREATE TABLE ${schema}.rows AS`,
|
||||
"SELECT * FROM postgres_query('pg', $pgsql$",
|
||||
' SELECT',
|
||||
' ' + projections.join(',\n '),
|
||||
' FROM base_rows',
|
||||
` WHERE base_id = '${baseId}'::uuid`,
|
||||
` AND workspace_id = '${workspaceId}'::uuid`,
|
||||
' AND deleted_at IS NULL',
|
||||
'$pgsql$)',
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
function projectionFor(spec: ColumnSpec): string {
|
||||
validateColumnName(spec.column);
|
||||
const qid = `"${spec.column}"`;
|
||||
|
||||
switch (spec.column) {
|
||||
case 'id': return 'id::text AS id';
|
||||
case 'base_id': return 'base_id::text AS base_id';
|
||||
case 'workspace_id': return 'workspace_id::text AS workspace_id';
|
||||
case 'creator_id': return 'creator_id::text AS creator_id';
|
||||
case 'position': return 'position';
|
||||
case 'created_at': return 'created_at';
|
||||
case 'updated_at': return 'updated_at';
|
||||
case 'last_updated_by_id': return 'last_updated_by_id::text AS last_updated_by_id';
|
||||
case 'deleted_at': return 'deleted_at';
|
||||
case 'search_text': return "''::VARCHAR AS search_text";
|
||||
}
|
||||
|
||||
const prop = spec.property;
|
||||
if (!prop) {
|
||||
throw new Error(
|
||||
`ColumnSpec for "${spec.column}" has no property; cannot project`,
|
||||
);
|
||||
}
|
||||
|
||||
const id = prop.id;
|
||||
if (!UUID.test(id)) {
|
||||
throw new Error(`Invalid property UUID "${id}"`);
|
||||
}
|
||||
|
||||
switch (spec.ddlType) {
|
||||
case 'VARCHAR':
|
||||
return `base_cell_text(cells, '${id}'::uuid) AS ${qid}`;
|
||||
case 'DOUBLE':
|
||||
return `base_cell_numeric(cells, '${id}'::uuid) AS ${qid}`;
|
||||
case 'TIMESTAMPTZ':
|
||||
return `base_cell_timestamptz(cells, '${id}'::uuid) AS ${qid}`;
|
||||
case 'BOOLEAN':
|
||||
return `base_cell_bool(cells, '${id}'::uuid) AS ${qid}`;
|
||||
case 'JSON':
|
||||
return `(cells -> '${id}')::text AS ${qid}`;
|
||||
default: {
|
||||
const _never: never = spec.ddlType;
|
||||
throw new Error(`Unknown DuckDbDdlType: ${_never}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const UUID =
|
||||
/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/;
|
||||
|
||||
const VALID_COL = /^[a-zA-Z0-9_\-]+$/;
|
||||
function validateColumnName(name: string): void {
|
||||
if (!VALID_COL.test(name)) {
|
||||
throw new Error(`Invalid column name "${name}"`);
|
||||
}
|
||||
}
|
||||
|
||||
const VALID_SCHEMA = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
|
||||
function validateSchema(name: string): void {
|
||||
if (!VALID_SCHEMA.test(name)) {
|
||||
throw new Error(`Invalid schema name "${name}"`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,964 @@
|
||||
import { Test, TestingModule } from '@nestjs/testing';
|
||||
import { ConfigModule } from '@nestjs/config';
|
||||
import { KyselyModule, InjectKysely } from 'nestjs-kysely';
|
||||
import { CamelCasePlugin } from 'kysely';
|
||||
import { PostgresJSDialect } from 'kysely-postgres-js';
|
||||
import * as postgres from 'postgres';
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { EventEmitterModule } from '@nestjs/event-emitter';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { BaseQueryCacheService, CacheListOpts } from './base-query-cache.service';
|
||||
import { QueryCacheConfigProvider } from './query-cache.config';
|
||||
import { CollectionLoader } from './collection-loader';
|
||||
import { DuckDbRuntime } from './duckdb-runtime';
|
||||
import { EnvironmentService } from '../../../integrations/environment/environment.service';
|
||||
import { FilterNode, PropertySchema, SortSpec } from '../engine';
|
||||
|
||||
const INTEGRATION_DB_URL = process.env.INTEGRATION_DB_URL;
|
||||
|
||||
@Injectable()
|
||||
class ParityEnvService {
|
||||
getDatabaseURL() {
|
||||
return INTEGRATION_DB_URL!;
|
||||
}
|
||||
getDatabaseMaxPool() {
|
||||
return 5;
|
||||
}
|
||||
getNodeEnv() {
|
||||
return 'test';
|
||||
}
|
||||
getBaseQueryCacheEnabled() {
|
||||
return true;
|
||||
}
|
||||
getBaseQueryCacheMinRows() {
|
||||
return 1;
|
||||
}
|
||||
getBaseQueryCacheMaxCollections() {
|
||||
return 5;
|
||||
}
|
||||
getBaseQueryCacheWarmTopN() {
|
||||
return 0;
|
||||
}
|
||||
getBaseQueryCacheDebug() {
|
||||
return false;
|
||||
}
|
||||
getBaseQueryCacheMemoryLimit() {
|
||||
return '128MB';
|
||||
}
|
||||
getBaseQueryCacheThreads() {
|
||||
return 2;
|
||||
}
|
||||
getBaseQueryCacheReaderPoolSize() {
|
||||
return 2;
|
||||
}
|
||||
getRedisUrl() {
|
||||
return 'redis://localhost:6379';
|
||||
}
|
||||
}
|
||||
|
||||
@Injectable()
|
||||
class DbHandle {
|
||||
constructor(@InjectKysely() readonly db: KyselyDB) {}
|
||||
}
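
// Example with a hypothetical URL: strips `sslmode=no-verify` and `schema`
// while keeping any other query params:
//   postgres://app:pw@db:5432/docmost?sslmode=no-verify&schema=public&max=5
//   -> postgres://app:pw@db:5432/docmost?max=5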
function normalizePostgresUrl(url: string): string {
|
||||
const parsed = new URL(url);
|
||||
const newParams = new URLSearchParams();
|
||||
for (const [key, value] of parsed.searchParams) {
|
||||
if (key === 'sslmode' && value === 'no-verify') continue;
|
||||
if (key === 'schema') continue;
|
||||
newParams.append(key, value);
|
||||
}
|
||||
parsed.search = newParams.toString();
|
||||
return parsed.toString();
|
||||
}
|
||||
|
||||
const describeIntegration = INTEGRATION_DB_URL ? describe : describe.skip;
|
||||
|
||||
// Inline uuid7 so the spec file doesn't need to import the esm-only uuid
|
||||
// package. Same pattern as seed-base.ts.
|
||||
function uuid7(): string {
|
||||
const now = BigInt(Date.now());
|
||||
const bytes = randomBytes(16);
|
||||
bytes[0] = Number((now >> 40n) & 0xffn);
|
||||
bytes[1] = Number((now >> 32n) & 0xffn);
|
||||
bytes[2] = Number((now >> 24n) & 0xffn);
|
||||
bytes[3] = Number((now >> 16n) & 0xffn);
|
||||
bytes[4] = Number((now >> 8n) & 0xffn);
|
||||
bytes[5] = Number(now & 0xffn);
|
||||
bytes[6] = (bytes[6] & 0x0f) | 0x70;
|
||||
bytes[8] = (bytes[8] & 0x3f) | 0x80;
|
||||
const hex = bytes.toString('hex');
|
||||
return (
|
||||
hex.slice(0, 8) +
|
||||
'-' +
|
||||
hex.slice(8, 12) +
|
||||
'-' +
|
||||
hex.slice(12, 16) +
|
||||
'-' +
|
||||
hex.slice(16, 20) +
|
||||
'-' +
|
||||
hex.slice(20, 32)
|
||||
);
|
||||
}
|
||||
|
||||
// Deterministic PRNG (mulberry32) for reproducible seeds across runs.
|
||||
function makeRng(seed: number): () => number {
|
||||
let s = seed >>> 0;
|
||||
return () => {
|
||||
s = (s + 0x6d2b79f5) >>> 0;
|
||||
let t = s;
|
||||
t = Math.imul(t ^ (t >>> 15), t | 1);
|
||||
t ^= t + Math.imul(t ^ (t >>> 7), t | 61);
|
||||
return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
|
||||
};
|
||||
}
|
||||
|
||||
type PropertyIds = {
|
||||
name: string;
|
||||
priority: string;
|
||||
due: string;
|
||||
done: string;
|
||||
status: string;
|
||||
tags: string;
|
||||
};
|
||||
|
||||
type ParityFixture = {
|
||||
baseId: string;
|
||||
propertyIds: PropertyIds;
|
||||
statusChoiceIds: string[];
|
||||
tagIds: string[];
|
||||
// Date used as a reference "now" for deterministic date fixtures.
|
||||
nowMs: number;
|
||||
schema: PropertySchema;
|
||||
};
|
||||
|
||||
const ROWS = 10_000;
|
||||
|
||||
// Text pool — kept single-case so PG's default collation and DuckDB's
|
||||
// bytewise collation agree on sort order. Mixed case causes the two
|
||||
// engines to diverge on ties (kilo < LIMA bytewise, LIMA < kilo locale).
|
||||
// That divergence is real and worth fixing at the engine level, but it's
|
||||
// out of scope for this parity test.
|
||||
const NAME_POOL = [
|
||||
'alpha report',
|
||||
'bravo update',
|
||||
'charlie draft',
|
||||
'delta review',
|
||||
'echo analysis',
|
||||
'foxtrot summary',
|
||||
'golf proposal',
|
||||
'hotel milestone',
|
||||
'india objective',
|
||||
'juliet strategy',
|
||||
'kilo tango',
|
||||
'lima uniform',
|
||||
'mike final',
|
||||
'november budget',
|
||||
'oscar timeline',
|
||||
];
|
||||
|
||||
async function seedParityBase(
|
||||
db: KyselyDB,
|
||||
workspaceId: string,
|
||||
spaceId: string,
|
||||
creatorUserId: string | null,
|
||||
): Promise<Omit<ParityFixture, 'schema'>> {
|
||||
// `as any` so this helper can use snake_case table/column names the same
|
||||
// way seed-base.ts does — avoids fighting with CamelCasePlugin types.
|
||||
const raw = db as any;
|
||||
const rng = makeRng(42);
|
||||
const baseId = uuid7();
|
||||
const nowMs = Date.UTC(2026, 0, 1, 12, 0, 0);
|
||||
|
||||
// Property ids and status/tag choice ids chosen up-front so filter
|
||||
// fixtures can reference them directly.
|
||||
const nameId = uuid7();
|
||||
const priorityId = uuid7();
|
||||
const dueId = uuid7();
|
||||
const doneId = uuid7();
|
||||
const statusId = uuid7();
|
||||
const tagsId = uuid7();
|
||||
|
||||
const statusChoiceIds = [uuid7(), uuid7(), uuid7(), uuid7(), uuid7()];
|
||||
const statusChoices = statusChoiceIds.map((id, i) => ({
|
||||
id,
|
||||
name: `Status ${i}`,
|
||||
color: 'gray',
|
||||
}));
|
||||
|
||||
const tagIds = [
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
uuid7(),
|
||||
];
|
||||
const tagChoices = tagIds.map((id, i) => ({
|
||||
id,
|
||||
name: `Tag ${i}`,
|
||||
color: 'blue',
|
||||
}));
|
||||
|
||||
await raw
|
||||
.insertInto('bases')
|
||||
.values({
|
||||
id: baseId,
|
||||
name: `parity-matrix-${Date.now()}`,
|
||||
space_id: spaceId,
|
||||
workspace_id: workspaceId,
|
||||
creator_id: creatorUserId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
} as any)
|
||||
.execute();
|
||||
|
||||
const propertyRows: any[] = [];
|
||||
let propPosition: string | null = null;
|
||||
const addProp = (
|
||||
id: string,
|
||||
name: string,
|
||||
type: string,
|
||||
typeOptions: any = null,
|
||||
isPrimary = false,
|
||||
) => {
|
||||
propPosition = generateJitteredKeyBetween(propPosition, null);
|
||||
propertyRows.push({
|
||||
id,
|
||||
base_id: baseId,
|
||||
name,
|
||||
type,
|
||||
position: propPosition,
|
||||
type_options: typeOptions,
|
||||
is_primary: isPrimary,
|
||||
workspace_id: workspaceId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
});
|
||||
};
|
||||
|
||||
addProp(nameId, 'Name', 'text', null, true);
|
||||
addProp(priorityId, 'Priority', 'number', { format: 'plain', precision: 0 });
|
||||
addProp(dueId, 'Due', 'date', {
|
||||
dateFormat: 'YYYY-MM-DD',
|
||||
includeTime: false,
|
||||
});
|
||||
addProp(doneId, 'Done', 'checkbox');
|
||||
addProp(statusId, 'Status', 'select', {
|
||||
choices: statusChoices,
|
||||
choiceOrder: statusChoiceIds,
|
||||
});
|
||||
addProp(tagsId, 'Tags', 'multiSelect', {
|
||||
choices: tagChoices,
|
||||
choiceOrder: tagIds,
|
||||
});
|
||||
|
||||
await raw.insertInto('base_properties').values(propertyRows).execute();
|
||||
|
||||
// Seed a view so the base looks complete.
|
||||
await raw
|
||||
.insertInto('base_views')
|
||||
.values({
|
||||
id: uuid7(),
|
||||
base_id: baseId,
|
||||
name: 'Table',
|
||||
type: 'table',
|
||||
position: generateJitteredKeyBetween(null, null),
|
||||
config: {},
|
||||
workspace_id: workspaceId,
|
||||
creator_id: creatorUserId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
} as any)
|
||||
.execute();
|
||||
|
||||
// Precompute positions as zero-padded digit strings. Both PG's default
|
||||
// collation and DuckDB's bytewise collation agree on digit ordering,
|
||||
// so position-tiebreak results are deterministic across engines. The
|
||||
// library-generated fractional-index keys (`a01K6`, `a2BdW`, ...) mix
|
||||
// case and re-order under locale-aware collation, which produces
|
||||
// divergent id lists between PG's `ORDER BY position` and DuckDB's.
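// With ROWS = 10_000 the pad is 7 characters, so positions run
// '0000000' .. '0009999'.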
const positions: string[] = new Array(ROWS);
|
||||
const pad = String(ROWS).length + 2;
|
||||
for (let i = 0; i < ROWS; i++) {
|
||||
positions[i] = String(i).padStart(pad, '0');
|
||||
}
|
||||
|
||||
const DAY_MS = 24 * 60 * 60 * 1000;
|
||||
const BATCH = 2000;
|
||||
for (let start = 0; start < ROWS; start += BATCH) {
|
||||
const end = Math.min(start + BATCH, ROWS);
|
||||
const batch: any[] = [];
|
||||
for (let i = start; i < end; i++) {
|
||||
const cells: Record<string, unknown> = {};
|
||||
|
||||
// name: always set. NULLs in text sort keys round-trip fine through
|
||||
// the `chr(1114111)` sentinel, but we leave non-NULL here so the
|
||||
// flat-filter `isEmpty/isNotEmpty` tests have a deterministic zero
|
||||
// count on the empty side (still exercised via ncontains etc.).
|
||||
cells[nameId] = NAME_POOL[Math.floor(rng() * NAME_POOL.length)];
|
||||
|
||||
// priority: always set. NULLs on a numeric sort key leak through
|
||||
// postgres.js's numeric parser (`'Infinity'::numeric` → NaN →
|
||||
// cursor `''` → null-on-decode) and cause PG's keyset
|
||||
// `applyCursor` to stall because `expr > NULL` is NULL. DuckDB has
|
||||
// no such issue. Rather than relax the pagination-walk assertion
|
||||
// we keep priorities non-NULL; isEmpty/isNotEmpty tests for
|
||||
// numeric properties are out of the required matrix.
|
||||
cells[priorityId] = Math.floor(rng() * 1000);
|
||||
|
||||
// due: null 5%, otherwise an ISO date within the last 90 days.
|
||||
// NULLs are safe on the flat-filter path (sorts: []) and on the
|
||||
// `due desc` multi-key sort because the '-infinity' sentinel sorts
|
||||
// NULLs last — the page boundary never lands on an Invalid Date.
|
||||
if (rng() < 0.05) {
|
||||
cells[dueId] = null;
|
||||
} else {
|
||||
const offsetDays = Math.floor(rng() * 90);
|
||||
const d = new Date(nowMs - offsetDays * DAY_MS);
|
||||
cells[dueId] = d.toISOString();
|
||||
}
|
||||
|
||||
// done: ~50/50 true/false, no nulls.
|
||||
cells[doneId] = rng() < 0.5;
|
||||
|
||||
// status: uniform over 5 choices.
|
||||
cells[statusId] =
|
||||
statusChoiceIds[Math.floor(rng() * statusChoiceIds.length)];
|
||||
|
||||
// tags: 0..3 random distinct tag ids.
|
||||
const tagCount = Math.floor(rng() * 4); // 0..3
|
||||
if (tagCount === 0) {
|
||||
cells[tagsId] = [];
|
||||
} else {
|
||||
const shuffled = [...tagIds].sort(() => rng() - 0.5);
|
||||
cells[tagsId] = shuffled.slice(0, tagCount);
|
||||
}
|
||||
|
||||
batch.push({
|
||||
id: uuid7(),
|
||||
base_id: baseId,
|
||||
cells,
|
||||
position: positions[i],
|
||||
creator_id: creatorUserId,
|
||||
workspace_id: workspaceId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
});
|
||||
}
|
||||
await raw.insertInto('base_rows').values(batch).execute();
|
||||
}
|
||||
|
||||
return {
|
||||
baseId,
|
||||
propertyIds: {
|
||||
name: nameId,
|
||||
priority: priorityId,
|
||||
due: dueId,
|
||||
done: doneId,
|
||||
status: statusId,
|
||||
tags: tagsId,
|
||||
},
|
||||
statusChoiceIds,
|
||||
tagIds,
|
||||
nowMs,
|
||||
};
|
||||
}
|
||||
|
||||
async function deleteParityBase(
|
||||
db: KyselyDB,
|
||||
baseId: string,
|
||||
): Promise<void> {
|
||||
const raw = db as any;
|
||||
await raw.deleteFrom('base_rows').where('base_id', '=', baseId).execute();
|
||||
await raw.deleteFrom('base_views').where('base_id', '=', baseId).execute();
|
||||
await raw
|
||||
.deleteFrom('base_properties')
|
||||
.where('base_id', '=', baseId)
|
||||
.execute();
|
||||
await raw.deleteFrom('bases').where('id', '=', baseId).execute();
|
||||
}
|
||||
|
||||
describeIntegration('BaseQueryCacheService ↔ Postgres parity matrix', () => {
|
||||
let moduleRef: TestingModule;
|
||||
let cache: BaseQueryCacheService;
|
||||
let baseRowRepo: BaseRowRepo;
|
||||
let dbHandle: DbHandle;
|
||||
let fixture: ParityFixture;
|
||||
let workspaceId: string;
|
||||
|
||||
beforeAll(async () => {
|
||||
process.env.DATABASE_URL = INTEGRATION_DB_URL;
|
||||
|
||||
moduleRef = await Test.createTestingModule({
|
||||
imports: [
|
||||
ConfigModule.forRoot({ isGlobal: true }),
|
||||
KyselyModule.forRoot({
|
||||
dialect: new PostgresJSDialect({
|
||||
postgres: (postgres as any)(
|
||||
normalizePostgresUrl(INTEGRATION_DB_URL!),
|
||||
{
|
||||
max: 5,
|
||||
onnotice: () => {},
|
||||
types: {
|
||||
bigint: {
|
||||
to: 20,
|
||||
from: [20, 1700],
|
||||
serialize: (value: number) => value.toString(),
|
||||
parse: (value: string) => Number.parseInt(value),
|
||||
},
|
||||
},
|
||||
},
|
||||
),
|
||||
}),
|
||||
plugins: [new CamelCasePlugin()],
|
||||
}),
|
||||
EventEmitterModule.forRoot(),
|
||||
],
|
||||
providers: [
|
||||
{ provide: EnvironmentService, useClass: ParityEnvService },
|
||||
QueryCacheConfigProvider,
|
||||
DuckDbRuntime,
|
||||
BaseRepo,
|
||||
BasePropertyRepo,
|
||||
BaseRowRepo,
|
||||
BaseViewRepo,
|
||||
CollectionLoader,
|
||||
BaseQueryCacheService,
|
||||
DbHandle,
|
||||
],
|
||||
}).compile();
|
||||
|
||||
await moduleRef.init();
|
||||
|
||||
cache = moduleRef.get(BaseQueryCacheService);
|
||||
baseRowRepo = moduleRef.get(BaseRowRepo);
|
||||
dbHandle = moduleRef.get(DbHandle);
|
||||
|
||||
const workspace = await dbHandle.db
|
||||
.selectFrom('workspaces')
|
||||
.select(['id'])
|
||||
.limit(1)
|
||||
.executeTakeFirstOrThrow();
|
||||
workspaceId = workspace.id;
|
||||
|
||||
const space = await dbHandle.db
|
||||
.selectFrom('spaces')
|
||||
.select(['id'])
|
||||
.where('workspaceId', '=', workspaceId)
|
||||
.limit(1)
|
||||
.executeTakeFirstOrThrow();
|
||||
const spaceId = space.id;
|
||||
|
||||
const user = await dbHandle.db
|
||||
.selectFrom('users')
|
||||
.select('id')
|
||||
.limit(1)
|
||||
.executeTakeFirst();
|
||||
const creatorUserId = user?.id ?? null;
|
||||
|
||||
const seeded = await seedParityBase(
|
||||
dbHandle.db,
|
||||
workspaceId,
|
||||
spaceId,
|
||||
creatorUserId,
|
||||
);
|
||||
|
||||
const properties = await moduleRef
|
||||
.get(BasePropertyRepo)
|
||||
.findByBaseId(seeded.baseId);
|
||||
const schema: PropertySchema = new Map(properties.map((p) => [p.id, p]));
|
||||
|
||||
fixture = { ...seeded, schema };
|
||||
}, 300_000);
|
||||
|
||||
afterAll(async () => {
|
||||
if (fixture?.baseId) {
|
||||
await deleteParityBase(dbHandle.db, fixture.baseId);
|
||||
}
|
||||
if (moduleRef) {
|
||||
await moduleRef.close();
|
||||
}
|
||||
}, 60_000);
|
||||
|
||||
// --- Helpers ---------------------------------------------------------
|
||||
//
|
||||
// The cache service takes `CacheListOpts` directly; the Postgres repo
// takes a super-set with `baseId` / `workspaceId`. Both share the same
// filter/sort/schema/pagination contract, so `runCache` / `runPg` fan out
// from a single logical query shape (`ParityQuery`).
|
||||
|
||||
type ParityQuery = {
|
||||
filter?: FilterNode;
|
||||
sorts?: SortSpec[];
|
||||
limit?: number;
|
||||
cursor?: string;
|
||||
};
|
||||
|
||||
async function runCache(q: ParityQuery) {
|
||||
const opts: CacheListOpts = {
|
||||
filter: q.filter,
|
||||
sorts: q.sorts,
|
||||
schema: fixture.schema,
|
||||
pagination: {
|
||||
limit: q.limit ?? 50,
|
||||
cursor: q.cursor,
|
||||
} as any,
|
||||
};
|
||||
return cache.list(fixture.baseId, workspaceId, opts);
|
||||
}
|
||||
|
||||
async function runPg(q: ParityQuery) {
|
||||
return baseRowRepo.list({
|
||||
baseId: fixture.baseId,
|
||||
workspaceId,
|
||||
filter: q.filter,
|
||||
sorts: q.sorts,
|
||||
schema: fixture.schema,
|
||||
pagination: {
|
||||
limit: q.limit ?? 50,
|
||||
cursor: q.cursor,
|
||||
} as any,
|
||||
});
|
||||
}
|
||||
|
||||
async function assertParity(
|
||||
q: ParityQuery,
|
||||
opts: { strictCursor?: boolean } = {},
|
||||
): Promise<void> {
|
||||
const { strictCursor = true } = opts;
|
||||
const [cacheRes, pgRes] = await Promise.all([runCache(q), runPg(q)]);
|
||||
const cacheIds = cacheRes.items.map((r) => r.id);
|
||||
const pgIds = pgRes.items.map((r) => r.id);
|
||||
expect(cacheIds).toEqual(pgIds);
|
||||
expect(cacheRes.meta.hasNextPage).toBe(pgRes.meta.hasNextPage);
|
||||
expect(cacheRes.meta.hasPrevPage).toBe(pgRes.meta.hasPrevPage);
|
||||
if (strictCursor) {
|
||||
expect(cacheRes.meta.nextCursor).toBe(pgRes.meta.nextCursor);
|
||||
expect(cacheRes.meta.prevCursor).toBe(pgRes.meta.prevCursor);
|
||||
}
|
||||
}
|
||||
|
||||
async function paginateAll(
|
||||
q: ParityQuery,
|
||||
via: 'cache' | 'postgres',
|
||||
): Promise<string[]> {
|
||||
const ids: string[] = [];
|
||||
let cursor: string | undefined;
|
||||
const run = via === 'cache' ? runCache : runPg;
|
||||
for (;;) {
|
||||
const page = await run({ ...q, cursor });
|
||||
for (const item of page.items) ids.push(item.id);
|
||||
if (!page.meta.hasNextPage || !page.meta.nextCursor) break;
|
||||
cursor = page.meta.nextCursor;
|
||||
}
|
||||
return ids;
|
||||
}
|
||||
|
||||
// --- Flat filters (~25 cases) ----------------------------------------
|
||||
//
|
||||
// Test data uses a reference `nowMs = 2026-01-01T12:00:00Z` with dates
|
||||
// distributed across the prior 90 days; the date fixtures pick a
|
||||
// midpoint so before/after/onOrBefore/onOrAfter each partition the data.
|
||||
const DAY_MS = 24 * 60 * 60 * 1000;
|
||||
|
||||
type FlatCase = { label: string; filter: FilterNode };
|
||||
|
||||
const flatCases = (): FlatCase[] => {
|
||||
const f = fixture;
|
||||
const midDate = new Date(f.nowMs - 45 * DAY_MS).toISOString();
|
||||
const tagSingle = [f.tagIds[0]];
|
||||
const tagPair = [f.tagIds[0], f.tagIds[1]];
|
||||
|
||||
return [
|
||||
// TEXT
|
||||
{
|
||||
label: 'text eq',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'eq', value: 'alpha report' },
|
||||
},
|
||||
{
|
||||
label: 'text neq',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'neq', value: 'alpha report' },
|
||||
},
|
||||
{
|
||||
label: 'text contains',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'contains', value: 'alpha' },
|
||||
},
|
||||
{
|
||||
label: 'text ncontains',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'ncontains', value: 'alpha' },
|
||||
},
|
||||
{
|
||||
label: 'text startsWith',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'startsWith', value: 'bravo' },
|
||||
},
|
||||
{
|
||||
label: 'text endsWith',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'endsWith', value: 'report' },
|
||||
},
|
||||
{
|
||||
label: 'text isEmpty',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'isEmpty' },
|
||||
},
|
||||
{
|
||||
label: 'text isNotEmpty',
|
||||
filter: { propertyId: f.propertyIds.name, op: 'isNotEmpty' },
|
||||
},
|
||||
|
||||
// NUMBER
|
||||
{
|
||||
label: 'number eq',
|
||||
filter: { propertyId: f.propertyIds.priority, op: 'eq', value: 42 },
|
||||
},
|
||||
{
|
||||
label: 'number gt',
|
||||
filter: { propertyId: f.propertyIds.priority, op: 'gt', value: 500 },
|
||||
},
|
||||
{
|
||||
label: 'number gte',
|
||||
filter: { propertyId: f.propertyIds.priority, op: 'gte', value: 500 },
|
||||
},
|
||||
{
|
||||
label: 'number lt',
|
||||
filter: { propertyId: f.propertyIds.priority, op: 'lt', value: 100 },
|
||||
},
|
||||
{
|
||||
label: 'number lte',
|
||||
filter: { propertyId: f.propertyIds.priority, op: 'lte', value: 100 },
|
||||
},
|
||||
{
|
||||
label: 'number neq',
|
||||
filter: { propertyId: f.propertyIds.priority, op: 'neq', value: 42 },
|
||||
},
|
||||
|
||||
// DATE
|
||||
{
|
||||
label: 'date before',
|
||||
filter: { propertyId: f.propertyIds.due, op: 'before', value: midDate },
|
||||
},
|
||||
{
|
||||
label: 'date after',
|
||||
filter: { propertyId: f.propertyIds.due, op: 'after', value: midDate },
|
||||
},
|
||||
{
|
||||
label: 'date onOrBefore',
|
||||
filter: { propertyId: f.propertyIds.due, op: 'onOrBefore', value: midDate },
|
||||
},
|
||||
{
|
||||
label: 'date onOrAfter',
|
||||
filter: { propertyId: f.propertyIds.due, op: 'onOrAfter', value: midDate },
|
||||
},
|
||||
|
||||
// CHECKBOX
|
||||
{
|
||||
label: 'checkbox eq true',
|
||||
filter: { propertyId: f.propertyIds.done, op: 'eq', value: true },
|
||||
},
|
||||
{
|
||||
label: 'checkbox eq false',
|
||||
filter: { propertyId: f.propertyIds.done, op: 'eq', value: false },
|
||||
},
|
||||
|
||||
// SELECT
|
||||
{
|
||||
label: 'select eq',
|
||||
filter: {
|
||||
propertyId: f.propertyIds.status,
|
||||
op: 'eq',
|
||||
value: f.statusChoiceIds[0],
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'select neq',
|
||||
filter: {
|
||||
propertyId: f.propertyIds.status,
|
||||
op: 'neq',
|
||||
value: f.statusChoiceIds[0],
|
||||
},
|
||||
},
|
||||
|
||||
// MULTI_SELECT
|
||||
{
|
||||
label: 'multi any (1 tag)',
|
||||
filter: {
|
||||
propertyId: f.propertyIds.tags,
|
||||
op: 'any',
|
||||
value: tagSingle,
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'multi any (2 tags)',
|
||||
filter: {
|
||||
propertyId: f.propertyIds.tags,
|
||||
op: 'any',
|
||||
value: tagPair,
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'multi all (2 tags)',
|
||||
filter: {
|
||||
propertyId: f.propertyIds.tags,
|
||||
op: 'all',
|
||||
value: tagPair,
|
||||
},
|
||||
},
|
||||
{
|
||||
label: 'multi none (2 tags)',
|
||||
filter: {
|
||||
propertyId: f.propertyIds.tags,
|
||||
op: 'none',
|
||||
value: tagPair,
|
||||
},
|
||||
},
|
||||
];
|
||||
};
|
||||
|
||||
// Lazy lookup: `flatCases()` reads `fixture`, which is only populated in
// `beforeAll`, but Jest evaluates `it.each` parameters at collect time,
// before `beforeAll` has run. Workaround: parameterize `it.each` over the
// static labels below and resolve the full case (filter + property ids)
// inside the test body, where `fixture` is available.
|
||||
it.each([
|
||||
'text eq',
|
||||
'text neq',
|
||||
'text contains',
|
||||
'text ncontains',
|
||||
'text startsWith',
|
||||
'text endsWith',
|
||||
'text isEmpty',
|
||||
'text isNotEmpty',
|
||||
'number eq',
|
||||
'number gt',
|
||||
'number gte',
|
||||
'number lt',
|
||||
'number lte',
|
||||
'number neq',
|
||||
'date before',
|
||||
'date after',
|
||||
'date onOrBefore',
|
||||
'date onOrAfter',
|
||||
'checkbox eq true',
|
||||
'checkbox eq false',
|
||||
'select eq',
|
||||
'select neq',
|
||||
'multi any (1 tag)',
|
||||
'multi any (2 tags)',
|
||||
'multi all (2 tags)',
|
||||
'multi none (2 tags)',
|
||||
])('flat filter: %s', async (label) => {
|
||||
const c = flatCases().find((x) => x.label === label);
|
||||
if (!c) throw new Error(`Missing flat case: ${label}`);
|
||||
await assertParity({ filter: c.filter, sorts: [] });
|
||||
}, 60_000);
|
||||
|
||||
// --- Nested boolean trees (4 cases) ---------------------------------
|
||||
|
||||
it(
|
||||
'nested: A AND B',
|
||||
async () => {
|
||||
const f = fixture;
|
||||
const filter: FilterNode = {
|
||||
op: 'and',
|
||||
children: [
|
||||
{ propertyId: f.propertyIds.done, op: 'eq', value: false },
|
||||
{ propertyId: f.propertyIds.priority, op: 'gt', value: 500 },
|
||||
],
|
||||
};
|
||||
await assertParity({ filter, sorts: [] });
|
||||
},
|
||||
60_000,
|
||||
);
|
||||
|
||||
it(
|
||||
'nested: A OR B',
|
||||
async () => {
|
||||
const f = fixture;
|
||||
const filter: FilterNode = {
|
||||
op: 'or',
|
||||
children: [
|
||||
{
|
||||
propertyId: f.propertyIds.status,
|
||||
op: 'eq',
|
||||
value: f.statusChoiceIds[0],
|
||||
},
|
||||
{
|
||||
propertyId: f.propertyIds.status,
|
||||
op: 'eq',
|
||||
value: f.statusChoiceIds[1],
|
||||
},
|
||||
],
|
||||
};
|
||||
await assertParity({ filter, sorts: [] });
|
||||
},
|
||||
60_000,
|
||||
);
|
||||
|
||||
it(
|
||||
'nested: (A AND B) OR (C AND D)',
|
||||
async () => {
|
||||
const f = fixture;
|
||||
const DAY = 24 * 60 * 60 * 1000;
|
||||
const someDate = new Date(f.nowMs - 60 * DAY).toISOString();
|
||||
const filter: FilterNode = {
|
||||
op: 'or',
|
||||
children: [
|
||||
{
|
||||
op: 'and',
|
||||
children: [
|
||||
{ propertyId: f.propertyIds.done, op: 'eq', value: true },
|
||||
{ propertyId: f.propertyIds.priority, op: 'lt', value: 100 },
|
||||
],
|
||||
},
|
||||
{
|
||||
op: 'and',
|
||||
children: [
|
||||
{ propertyId: f.propertyIds.done, op: 'eq', value: false },
|
||||
{
|
||||
propertyId: f.propertyIds.due,
|
||||
op: 'before',
|
||||
value: someDate,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
await assertParity({ filter, sorts: [] });
|
||||
},
|
||||
60_000,
|
||||
);
|
||||
|
||||
it(
|
||||
'nested: max-depth 5-level left-skewed tree completes under soft budget',
|
||||
async () => {
|
||||
const f = fixture;
|
||||
// 5-level left-skewed: root AND with a leaf + AND with a leaf + ...
|
||||
// Each internal node has one leaf child and one group child. Tree
|
||||
// depth is MAX_FILTER_DEPTH (5); every condition filters ≥80% of
|
||||
// rows so the combined predicate returns a small result set.
|
||||
const leaf = (): FilterNode => ({
|
||||
propertyId: f.propertyIds.done,
|
||||
op: 'eq',
|
||||
value: true,
|
||||
});
|
||||
const filter: FilterNode = {
|
||||
op: 'and',
|
||||
children: [
|
||||
leaf(),
|
||||
{
|
||||
op: 'and',
|
||||
children: [
|
||||
leaf(),
|
||||
{
|
||||
op: 'and',
|
||||
children: [
|
||||
leaf(),
|
||||
{
|
||||
op: 'and',
|
||||
children: [
|
||||
leaf(),
|
||||
{
|
||||
op: 'and',
|
||||
children: [leaf()],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
// Prime the cache so we're measuring the filter path, not the load.
|
||||
await runCache({ sorts: [] });
|
||||
|
||||
// Smoke-check cache latency: 5-level filter on 10K rows should be
|
||||
// fast. 1000ms is a loose bound to absorb slow CI hosts; the point
|
||||
// is to catch O(N^2) regressions, not to benchmark.
|
||||
const tStart = Date.now();
|
||||
await runCache({ filter, sorts: [] });
|
||||
const cacheMs = Date.now() - tStart;
|
||||
expect(cacheMs).toBeLessThan(1000);
|
||||
|
||||
// Full parity check (fans out to both engines).
|
||||
await assertParity({ filter, sorts: [] });
|
||||
},
|
||||
60_000,
|
||||
);
|
||||
|
||||
// --- Multi-key sorts (3 cases) ---------------------------------------
|
||||
//
|
||||
// All sort keys here hold real values at page-1 boundaries:
|
||||
// - priority is always set (no NULLs by design — see seed).
|
||||
// - due can be NULL 5% of the time but the `-infinity` sentinel
|
||||
// sorts NULLs last on DESC, so the first 50 rows' due values are
|
||||
// all real dates.
|
||||
// - name is always set and lowercase, so bytewise (DuckDB) and
|
||||
// locale (PG default) collations agree.
|
||||
|
||||
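// Illustrative sketch only (assumption: the real cache/repo SQL differs in
// detail). The sentinel trick referenced above amounts to coalescing a
// missing date to '-infinity' before ordering, e.g.:
//
//   ORDER BY COALESCE(base_cell_timestamptz(cells, :dueId), '-infinity') DESC
//
// so rows with a real due date sort first and NULL-due rows land last,
// which both engines agree on.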
it.each([
|
||||
{
|
||||
label: 'priority desc',
|
||||
sorts: (): SortSpec[] => [
|
||||
{ propertyId: fixture.propertyIds.priority, direction: 'desc' },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'priority asc, name asc',
|
||||
sorts: (): SortSpec[] => [
|
||||
{ propertyId: fixture.propertyIds.priority, direction: 'asc' },
|
||||
{ propertyId: fixture.propertyIds.name, direction: 'asc' },
|
||||
],
|
||||
},
|
||||
{
|
||||
label: 'due desc, priority desc, name asc',
|
||||
sorts: (): SortSpec[] => [
|
||||
{ propertyId: fixture.propertyIds.due, direction: 'desc' },
|
||||
{ propertyId: fixture.propertyIds.priority, direction: 'desc' },
|
||||
{ propertyId: fixture.propertyIds.name, direction: 'asc' },
|
||||
],
|
||||
},
|
||||
])('multi-key sort: $label', async ({ sorts }) => {
|
||||
await assertParity({ sorts: sorts() });
|
||||
}, 60_000);
|
||||
|
||||
// --- Filter + sort + pagination walk --------------------------------
|
||||
|
||||
it(
|
||||
'filter + sort + pagination walk produces identical id lists with no duplicates',
|
||||
async () => {
|
||||
const f = fixture;
|
||||
const filter: FilterNode = {
|
||||
op: 'and',
|
||||
children: [
|
||||
{ propertyId: f.propertyIds.done, op: 'eq', value: false },
|
||||
],
|
||||
};
|
||||
const sorts: SortSpec[] = [
|
||||
{ propertyId: f.propertyIds.priority, direction: 'desc' },
|
||||
{ propertyId: f.propertyIds.name, direction: 'asc' },
|
||||
];
|
||||
|
||||
const cacheIds = await paginateAll({ filter, sorts, limit: 200 }, 'cache');
|
||||
const pgIds = await paginateAll({ filter, sorts, limit: 200 }, 'postgres');
|
||||
|
||||
// DuckDB must emit no duplicates.
|
||||
expect(new Set(cacheIds).size).toBe(cacheIds.length);
|
||||
|
||||
// Both engines paginate through the same rows in the same order.
|
||||
// priority and name are NULL-free by seed design and position is
|
||||
// digit-only so collation doesn't diverge at the tail tiebreak.
|
||||
expect(cacheIds).toEqual(pgIds);
|
||||
},
|
||||
180_000,
|
||||
);
|
||||
});
|
||||
@@ -0,0 +1,32 @@
import { Injectable } from '@nestjs/common';
import { EnvironmentService } from '../../../integrations/environment/environment.service';

export type QueryCacheConfig = {
  enabled: boolean;
  minRows: number;
  maxCollections: number;
  warmTopN: number;
  memoryLimit: string;
  threads: number;
  trace: boolean;
  tempDirectory: string;
  readerPoolSize: number;
};

@Injectable()
export class QueryCacheConfigProvider {
  readonly config: QueryCacheConfig;
  constructor(env: EnvironmentService) {
    this.config = {
      enabled: env.getBaseQueryCacheEnabled(),
      minRows: env.getBaseQueryCacheMinRows(),
      maxCollections: env.getBaseQueryCacheMaxCollections(),
      warmTopN: env.getBaseQueryCacheWarmTopN(),
      memoryLimit: env.getBaseQueryCacheMemoryLimit(),
      threads: env.getBaseQueryCacheThreads(),
      trace: env.getBaseQueryCacheTrace(),
      tempDirectory: env.getBaseQueryCacheTempDirectory(),
      readerPoolSize: env.getBaseQueryCacheReaderPoolSize(),
    };
  }
}
|
||||
@@ -0,0 +1,27 @@
import { Module } from '@nestjs/common';
import { QueryCacheConfigProvider } from './query-cache.config';
import { DuckDbRuntime } from './duckdb-runtime';
import { BaseQueryCacheService } from './base-query-cache.service';
import { BaseQueryRouter } from './base-query-router';
import { CollectionLoader } from './collection-loader';
import { BaseQueryCacheWriteConsumer } from './base-query-cache.write-consumer';
import { BaseQueryCacheSubscriber } from './base-query-cache.subscriber';

@Module({
  providers: [
    QueryCacheConfigProvider,
    DuckDbRuntime,
    CollectionLoader,
    BaseQueryCacheService,
    BaseQueryRouter,
    BaseQueryCacheWriteConsumer,
    BaseQueryCacheSubscriber,
  ],
  exports: [
    BaseQueryCacheService,
    BaseQueryRouter,
    DuckDbRuntime,
    QueryCacheConfigProvider,
  ],
})
export class QueryCacheModule {}
|
||||
@@ -0,0 +1,49 @@
|
||||
import type { BaseProperty } from '@docmost/db/types/entity.types';
|
||||
|
||||
export type DuckDbColumnType =
|
||||
| 'VARCHAR'
|
||||
| 'DOUBLE'
|
||||
| 'BOOLEAN'
|
||||
| 'TIMESTAMPTZ'
|
||||
| 'JSON';
|
||||
|
||||
export type ColumnSpec = {
|
||||
/*
|
||||
* The uuid of the property (user-defined props) or a stable literal
|
||||
* ('id', 'position', 'created_at', 'updated_at', 'last_updated_by_id',
|
||||
* 'deleted_at', 'search_text') for system columns.
|
||||
*/
|
||||
column: string;
|
||||
ddlType: DuckDbColumnType;
|
||||
indexable: boolean;
|
||||
property?: Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>;
|
||||
};
|
||||
|
||||
/*
|
||||
* A base held in the shared DuckDB instance. Instead of owning a
|
||||
* `DuckDBInstance` and `DuckDBConnection`, it now just remembers the schema
|
||||
* name of its attached in-memory database. The runtime owns the actual
|
||||
* connections; this is pure metadata.
|
||||
*/
|
||||
export type LoadedCollection = {
|
||||
baseId: string;
|
||||
schema: string; // e.g. "b_019c69a51d847985a7f68ee2871d8669"
|
||||
schemaVersion: number;
|
||||
columns: ColumnSpec[];
|
||||
lastAccessedAt: number;
|
||||
rowCount: number;
|
||||
/*
|
||||
* Estimated in-memory footprint, in bytes. DuckDB does not expose
|
||||
* per-attached-db memory accounting, so this is a rough heuristic
|
||||
* computed at load time: rowCount × columns.length × ~64 bytes. Used
|
||||
* for cache-size reporting; not for eviction decisions.
|
||||
*/
|
||||
approxBytes: number;
|
||||
};
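// Worked example of the heuristic above (illustrative numbers, not measured):
// a 10,000-row base with 15 columns reports roughly
// 10_000 * 15 * 64 = 9,600,000 bytes (~9.6 MB) for approxBytes.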
|
||||
|
||||
export type ChangeEnvelope =
|
||||
| { kind: 'row-upsert'; baseId: string; row: Record<string, unknown> }
|
||||
| { kind: 'row-delete'; baseId: string; rowId: string }
|
||||
| { kind: 'rows-delete'; baseId: string; rowIds: string[] }
|
||||
| { kind: 'row-reorder'; baseId: string; rowId: string; position: string }
|
||||
| { kind: 'schema-invalidate'; baseId: string; schemaVersion: number };
|
||||
@@ -0,0 +1,34 @@
|
||||
import { baseSchemaName } from './schema-name';
|
||||
|
||||
describe('baseSchemaName', () => {
|
||||
it('converts a uuid to a DuckDB-safe identifier with a b_ prefix', () => {
|
||||
expect(baseSchemaName('019c69a5-1d84-7985-a7f6-8ee2871d8669')).toBe(
|
||||
'b_019c69a51d847985a7f68ee2871d8669',
|
||||
);
|
||||
});
|
||||
|
||||
it('rejects a non-uuid string (preserves the quoting contract)', () => {
|
||||
expect(() => baseSchemaName('not-a-uuid')).toThrow(/invalid base id/i);
|
||||
expect(() => baseSchemaName('')).toThrow(/invalid base id/i);
|
||||
expect(() => baseSchemaName('b_019c69a5; DROP TABLE rows; --')).toThrow(
|
||||
/invalid base id/i,
|
||||
);
|
||||
});
|
||||
|
||||
it('is deterministic', () => {
|
||||
const id = '019c70b3-dd47-7014-8b87-ec8f167577ee';
|
||||
expect(baseSchemaName(id)).toBe(baseSchemaName(id));
|
||||
});
|
||||
|
||||
it('accepts mixed-case hex and normalises to lowercase', () => {
|
||||
expect(baseSchemaName('019C69A5-1D84-7985-A7F6-8EE2871D8669')).toBe(
|
||||
'b_019c69a51d847985a7f68ee2871d8669',
|
||||
);
|
||||
});
|
||||
|
||||
it('produces names that parse as SQL identifiers without quoting', () => {
|
||||
const name = baseSchemaName('019c69a5-1d84-7985-a7f6-8ee2871d8669');
|
||||
// Must match DuckDB's unquoted-identifier grammar: [a-zA-Z_][a-zA-Z0-9_]*
|
||||
expect(name).toMatch(/^[a-zA-Z_][a-zA-Z0-9_]*$/);
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,31 @@
// Matches the UUID regex pattern in `loader-sql.ts`. We use a handwritten
// regex rather than importing `validate` from the `uuid` package because
// that package is ESM-only and Jest's ts-jest config cannot transform it
// in this repo.
const UUID =
  /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/;

const UUID_DASHES = /-/g;

/*
 * Turns a base UUID into a DuckDB-safe schema name.
 *
 * '019c69a5-1d84-7985-a7f6-8ee2871d8669'
 *   -> 'b_019c69a51d847985a7f68ee2871d8669'
 *
 * The `b_` prefix is required because DuckDB unquoted identifiers must start
 * with a letter or underscore — a bare hex UUID starts with a digit and would
 * have to be double-quoted everywhere. The strip-dashes step makes the rest
 * of the identifier hex-only, which is always safe.
 *
 * All attached database names, `DETACH DATABASE` targets, and schema-qualified
 * references (`<schema>.rows`) run through this function. Validation is
 * strict: if the input isn't a real UUID, we throw rather than produce a
 * "safe-looking" identifier that might leak through to user-facing SQL.
 */
export function baseSchemaName(baseId: string): string {
  if (!UUID.test(baseId)) {
    throw new Error(`Invalid base id "${baseId}"`);
  }
  return `b_${baseId.toLowerCase().replace(UUID_DASHES, '')}`;
}
|
||||
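// Usage sketch (assumption: illustrative only; the runtime's real ATTACH /
// query strings live in the loader, and `conn` here stands in for whatever
// DuckDB connection handle it uses):
//
//   const schema = baseSchemaName(baseId);              // 'b_019c69a51d84...'
//   await conn.run(`ATTACH ':memory:' AS ${schema}`);
//   await conn.run(`SELECT count(*) FROM ${schema}.rows`);
//   await conn.run(`DETACH DATABASE ${schema}`);
//
// Because the identifier is validated to hex + underscore, it never needs
// double-quoting when spliced into SQL.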
@@ -0,0 +1,411 @@
|
||||
import type { Kysely } from 'kysely';
|
||||
import { randomBytes } from 'node:crypto';
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
|
||||
// Minimal RFC 9562 uuid7. We inline instead of importing `uuid@13` because
|
||||
// that package is ESM-only and this module is loaded by jest (CommonJS) in
|
||||
// the integration spec.
|
||||
function uuid7(): string {
|
||||
const now = BigInt(Date.now());
|
||||
const bytes = randomBytes(16);
|
||||
bytes[0] = Number((now >> 40n) & 0xffn);
|
||||
bytes[1] = Number((now >> 32n) & 0xffn);
|
||||
bytes[2] = Number((now >> 24n) & 0xffn);
|
||||
bytes[3] = Number((now >> 16n) & 0xffn);
|
||||
bytes[4] = Number((now >> 8n) & 0xffn);
|
||||
bytes[5] = Number(now & 0xffn);
|
||||
bytes[6] = (bytes[6] & 0x0f) | 0x70; // version 7
|
||||
bytes[8] = (bytes[8] & 0x3f) | 0x80; // variant
|
||||
const hex = bytes.toString('hex');
|
||||
return (
|
||||
hex.slice(0, 8) +
|
||||
'-' +
|
||||
hex.slice(8, 12) +
|
||||
'-' +
|
||||
hex.slice(12, 16) +
|
||||
'-' +
|
||||
hex.slice(16, 20) +
|
||||
'-' +
|
||||
hex.slice(20, 32)
|
||||
);
|
||||
}
|
||||
|
||||
export type SeedBaseOptions = {
|
||||
db: Kysely<any>;
|
||||
workspaceId: string;
|
||||
spaceId: string;
|
||||
creatorUserId: string | null;
|
||||
rows: number;
|
||||
name?: string;
|
||||
};
|
||||
|
||||
export type SeededBase = {
|
||||
baseId: string;
|
||||
propertyIds: {
|
||||
title: string;
|
||||
status: string;
|
||||
priority: string;
|
||||
category: string;
|
||||
tags: string;
|
||||
dueDate: string;
|
||||
estimate: string;
|
||||
budget: string;
|
||||
approved: string;
|
||||
website: string;
|
||||
contactEmail: string;
|
||||
notes: string;
|
||||
created: string;
|
||||
lastEdited: string;
|
||||
// Generic aliases used by parity tests.
|
||||
text: string;
|
||||
number: string;
|
||||
date: string;
|
||||
};
|
||||
statusChoiceIds: string[];
|
||||
};
|
||||
|
||||
const SKIP_TYPES = new Set([
|
||||
'createdAt',
|
||||
'lastEditedAt',
|
||||
'lastEditedBy',
|
||||
'person',
|
||||
'file',
|
||||
]);
|
||||
|
||||
const WORDS = [
|
||||
'Alpha', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot', 'Golf',
|
||||
'Hotel', 'India', 'Juliet', 'Kilo', 'Lima', 'Mike', 'November',
|
||||
'Oscar', 'Papa', 'Quebec', 'Romeo', 'Sierra', 'Tango', 'Uniform',
|
||||
'Victor', 'Whiskey', 'X-ray', 'Yankee', 'Zulu', 'Report', 'Analysis',
|
||||
'Summary', 'Review', 'Update', 'Draft', 'Final', 'Proposal', 'Budget',
|
||||
'Timeline', 'Milestone', 'Objective', 'Strategy', 'Initiative',
|
||||
];
|
||||
|
||||
const COLORS = [
|
||||
'red', 'orange', 'yellow', 'green', 'blue', 'purple', 'pink', 'gray',
|
||||
];
|
||||
|
||||
// Deterministic RNG (mulberry32) so tests are reproducible.
|
||||
function makeRng(seed: number): () => number {
|
||||
let s = seed >>> 0;
|
||||
return () => {
|
||||
s = (s + 0x6d2b79f5) >>> 0;
|
||||
let t = s;
|
||||
t = Math.imul(t ^ (t >>> 15), t | 1);
|
||||
t ^= t + Math.imul(t ^ (t >>> 7), t | 61);
|
||||
return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
|
||||
};
|
||||
}
|
||||
|
||||
function hashSeed(input: string): number {
|
||||
let h = 2166136261;
|
||||
for (let i = 0; i < input.length; i++) {
|
||||
h ^= input.charCodeAt(i);
|
||||
h = Math.imul(h, 16777619);
|
||||
}
|
||||
return h >>> 0;
|
||||
}
|
||||
|
||||
function randomWords(rng: () => number, min: number, max: number): string {
|
||||
const count = min + Math.floor(rng() * (max - min + 1));
|
||||
const result: string[] = [];
|
||||
for (let i = 0; i < count; i++) {
|
||||
result.push(WORDS[Math.floor(rng() * WORDS.length)]);
|
||||
}
|
||||
return result.join(' ');
|
||||
}
|
||||
|
||||
function makeChoices(names: string[]) {
|
||||
return names.map((name, i) => ({
|
||||
id: uuid7(),
|
||||
name,
|
||||
color: COLORS[i % COLORS.length],
|
||||
}));
|
||||
}
|
||||
|
||||
function makeStatusChoices() {
|
||||
const todo = [
|
||||
{ id: uuid7(), name: 'Not Started', color: 'gray', category: 'todo' },
|
||||
];
|
||||
const inProgress = [
|
||||
{ id: uuid7(), name: 'In Progress', color: 'blue', category: 'inProgress' },
|
||||
{ id: uuid7(), name: 'In Review', color: 'purple', category: 'inProgress' },
|
||||
];
|
||||
const complete = [
|
||||
{ id: uuid7(), name: 'Done', color: 'green', category: 'complete' },
|
||||
{ id: uuid7(), name: 'Cancelled', color: 'red', category: 'complete' },
|
||||
];
|
||||
const all = [...todo, ...inProgress, ...complete];
|
||||
return { choices: all, choiceOrder: all.map((c) => c.id) };
|
||||
}
|
||||
|
||||
type PropertyDef = {
|
||||
name: string;
|
||||
type: string;
|
||||
isPrimary?: boolean;
|
||||
typeOptions?: any;
|
||||
};
|
||||
|
||||
function buildPropertyDefinitions(): PropertyDef[] {
|
||||
const priorityChoices = makeChoices(['Low', 'Medium', 'High', 'Critical']);
|
||||
const categoryChoices = makeChoices([
|
||||
'Engineering',
|
||||
'Design',
|
||||
'Marketing',
|
||||
'Sales',
|
||||
'Support',
|
||||
'Operations',
|
||||
]);
|
||||
const tagChoices = makeChoices([
|
||||
'Bug',
|
||||
'Feature',
|
||||
'Improvement',
|
||||
'Documentation',
|
||||
'Research',
|
||||
]);
|
||||
const statusOpts = makeStatusChoices();
|
||||
|
||||
return [
|
||||
{ name: 'Title', type: 'text', isPrimary: true },
|
||||
{ name: 'Status', type: 'status', typeOptions: statusOpts },
|
||||
{
|
||||
name: 'Priority',
|
||||
type: 'select',
|
||||
typeOptions: {
|
||||
choices: priorityChoices,
|
||||
choiceOrder: priorityChoices.map((c) => c.id),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Category',
|
||||
type: 'select',
|
||||
typeOptions: {
|
||||
choices: categoryChoices,
|
||||
choiceOrder: categoryChoices.map((c) => c.id),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Tags',
|
||||
type: 'multiSelect',
|
||||
typeOptions: {
|
||||
choices: tagChoices,
|
||||
choiceOrder: tagChoices.map((c) => c.id),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'Due Date',
|
||||
type: 'date',
|
||||
typeOptions: { dateFormat: 'YYYY-MM-DD', includeTime: false },
|
||||
},
|
||||
{
|
||||
name: 'Estimate',
|
||||
type: 'number',
|
||||
typeOptions: { format: 'plain', precision: 1 },
|
||||
},
|
||||
{
|
||||
name: 'Budget',
|
||||
type: 'number',
|
||||
typeOptions: { format: 'currency', precision: 2, currencySymbol: '$' },
|
||||
},
|
||||
{ name: 'Approved', type: 'checkbox' },
|
||||
{ name: 'Website', type: 'url' },
|
||||
{ name: 'Contact Email', type: 'email' },
|
||||
{ name: 'Notes', type: 'text' },
|
||||
{ name: 'Created', type: 'createdAt' },
|
||||
{ name: 'Last Edited', type: 'lastEditedAt' },
|
||||
];
|
||||
}
|
||||
|
||||
type CellGenerator = () => unknown;
|
||||
|
||||
function buildCellGenerator(
|
||||
property: any,
|
||||
rng: () => number,
|
||||
): CellGenerator | null {
|
||||
if (SKIP_TYPES.has(property.type)) return null;
|
||||
|
||||
const typeOptions = property.type_options ?? property.typeOptions;
|
||||
|
||||
switch (property.type) {
|
||||
case 'text':
|
||||
return () => randomWords(rng, 2, 6);
|
||||
|
||||
case 'number':
|
||||
return () => Math.round(rng() * 10000 * 100) / 100;
|
||||
|
||||
case 'select':
|
||||
case 'status': {
|
||||
const choices = typeOptions?.choices ?? [];
|
||||
if (choices.length === 0) return null;
|
||||
return () => choices[Math.floor(rng() * choices.length)].id;
|
||||
}
|
||||
|
||||
case 'multiSelect': {
|
||||
const choices = typeOptions?.choices ?? [];
|
||||
if (choices.length === 0) return () => [];
|
||||
return () => {
|
||||
const count = 1 + Math.floor(rng() * Math.min(3, choices.length));
|
||||
const shuffled = [...choices].sort(() => rng() - 0.5);
|
||||
return shuffled.slice(0, count).map((c: any) => c.id);
|
||||
};
|
||||
}
|
||||
|
||||
case 'date': {
|
||||
const start = new Date(2020, 0, 1).getTime();
|
||||
const range = new Date(2026, 0, 1).getTime() - start;
|
||||
return () => new Date(start + rng() * range).toISOString();
|
||||
}
|
||||
|
||||
case 'checkbox':
|
||||
return () => rng() > 0.5;
|
||||
|
||||
case 'url':
|
||||
return () => `https://example.com/page/${Math.floor(rng() * 100000)}`;
|
||||
|
||||
case 'email':
|
||||
return () => `user${Math.floor(rng() * 100000)}@example.com`;
|
||||
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
export async function seedBase(opts: SeedBaseOptions): Promise<SeededBase> {
|
||||
const { db, workspaceId, spaceId, creatorUserId, rows } = opts;
|
||||
const baseName =
|
||||
opts.name ??
|
||||
`Seed Base ${rows >= 1000 ? `${Math.round(rows / 1000)}K` : `${rows}`} rows`;
|
||||
|
||||
const rng = makeRng(hashSeed(`${baseName}:${rows}`));
|
||||
const baseId = uuid7();
|
||||
|
||||
await db
|
||||
.insertInto('bases')
|
||||
.values({
|
||||
id: baseId,
|
||||
name: baseName,
|
||||
space_id: spaceId,
|
||||
workspace_id: workspaceId,
|
||||
creator_id: creatorUserId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
})
|
||||
.execute();
|
||||
|
||||
const propertyDefs = buildPropertyDefinitions();
|
||||
let propPosition: string | null = null;
|
||||
const insertedProperties: any[] = [];
|
||||
|
||||
for (const def of propertyDefs) {
|
||||
propPosition = generateJitteredKeyBetween(propPosition, null);
|
||||
insertedProperties.push({
|
||||
id: uuid7(),
|
||||
base_id: baseId,
|
||||
name: def.name,
|
||||
type: def.type,
|
||||
position: propPosition,
|
||||
type_options: def.typeOptions ?? null,
|
||||
is_primary: def.isPrimary ?? false,
|
||||
workspace_id: workspaceId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
});
|
||||
}
|
||||
|
||||
await db.insertInto('base_properties').values(insertedProperties).execute();
|
||||
|
||||
const viewId = uuid7();
|
||||
await db
|
||||
.insertInto('base_views')
|
||||
.values({
|
||||
id: viewId,
|
||||
base_id: baseId,
|
||||
name: 'Table View 1',
|
||||
type: 'table',
|
||||
position: generateJitteredKeyBetween(null, null),
|
||||
config: {},
|
||||
workspace_id: workspaceId,
|
||||
creator_id: creatorUserId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
})
|
||||
.execute();
|
||||
|
||||
const byName = new Map(insertedProperties.map((p) => [p.name, p.id]));
|
||||
const propertyIds: SeededBase['propertyIds'] = {
|
||||
title: byName.get('Title')!,
|
||||
status: byName.get('Status')!,
|
||||
priority: byName.get('Priority')!,
|
||||
category: byName.get('Category')!,
|
||||
tags: byName.get('Tags')!,
|
||||
dueDate: byName.get('Due Date')!,
|
||||
estimate: byName.get('Estimate')!,
|
||||
budget: byName.get('Budget')!,
|
||||
approved: byName.get('Approved')!,
|
||||
website: byName.get('Website')!,
|
||||
contactEmail: byName.get('Contact Email')!,
|
||||
notes: byName.get('Notes')!,
|
||||
created: byName.get('Created')!,
|
||||
lastEdited: byName.get('Last Edited')!,
|
||||
text: byName.get('Title')!,
|
||||
number: byName.get('Estimate')!,
|
||||
date: byName.get('Due Date')!,
|
||||
};
|
||||
|
||||
const statusProp = insertedProperties.find((p) => p.name === 'Status');
|
||||
const statusChoiceIds: string[] =
|
||||
(statusProp?.type_options?.choices ?? []).map((c: any) => c.id);
|
||||
|
||||
const generators: Array<{ propertyId: string; generate: CellGenerator }> = [];
|
||||
for (const prop of insertedProperties) {
|
||||
const gen = buildCellGenerator(prop, rng);
|
||||
if (gen) {
|
||||
generators.push({ propertyId: prop.id, generate: gen });
|
||||
}
|
||||
}
|
||||
|
||||
const positions: string[] = new Array(rows);
|
||||
let lastPosition: string | null = null;
|
||||
for (let i = 0; i < rows; i++) {
|
||||
lastPosition = generateJitteredKeyBetween(lastPosition, null);
|
||||
positions[i] = lastPosition;
|
||||
}
|
||||
|
||||
const BATCH_SIZE = 2000;
|
||||
for (let batchStart = 0; batchStart < rows; batchStart += BATCH_SIZE) {
|
||||
const batchEnd = Math.min(batchStart + BATCH_SIZE, rows);
|
||||
const rowsBatch: any[] = [];
|
||||
for (let i = batchStart; i < batchEnd; i++) {
|
||||
const cells: Record<string, unknown> = {};
|
||||
for (const { propertyId, generate } of generators) {
|
||||
cells[propertyId] = generate();
|
||||
}
|
||||
rowsBatch.push({
|
||||
id: uuid7(),
|
||||
base_id: baseId,
|
||||
cells,
|
||||
position: positions[i],
|
||||
creator_id: creatorUserId,
|
||||
workspace_id: workspaceId,
|
||||
created_at: new Date(),
|
||||
updated_at: new Date(),
|
||||
});
|
||||
}
|
||||
await db.insertInto('base_rows').values(rowsBatch).execute();
|
||||
}
|
||||
|
||||
return { baseId, propertyIds, statusChoiceIds };
|
||||
}
|
||||
|
||||
export async function deleteSeededBase(
|
||||
db: Kysely<any>,
|
||||
baseId: string,
|
||||
): Promise<void> {
|
||||
await db.deleteFrom('base_rows').where('base_id', '=', baseId).execute();
|
||||
await db.deleteFrom('base_views').where('base_id', '=', baseId).execute();
|
||||
await db
|
||||
.deleteFrom('base_properties')
|
||||
.where('base_id', '=', baseId)
|
||||
.execute();
|
||||
await db.deleteFrom('bases').where('id', '=', baseId).execute();
|
||||
}
|
||||
@@ -138,68 +138,40 @@ export class BaseCsvExportService {
|
||||
chunk: Array<{ cells: unknown; lastUpdatedById: string | null }>,
|
||||
properties: Array<{ id: string; type: string }>,
|
||||
): Promise<CellCsvContext> {
|
||||
const ctx: CellCsvContext = {};
|
||||
|
||||
const needsUsers = properties.some(
|
||||
(p) =>
|
||||
p.type === BasePropertyType.PERSON ||
|
||||
p.type === BasePropertyType.LAST_EDITED_BY,
|
||||
);
|
||||
if (!needsUsers) return {};
|
||||
|
||||
if (needsUsers) {
|
||||
const userIds = new Set<string>();
|
||||
const personPropIds = properties
|
||||
.filter((p) => p.type === BasePropertyType.PERSON)
|
||||
.map((p) => p.id);
|
||||
|
||||
for (const row of chunk) {
|
||||
if (row.lastUpdatedById) userIds.add(row.lastUpdatedById);
|
||||
const cells = (row.cells ?? {}) as Record<string, unknown>;
|
||||
for (const pid of personPropIds) {
|
||||
const v = cells[pid];
|
||||
if (typeof v === 'string') userIds.add(v);
|
||||
else if (Array.isArray(v)) {
|
||||
for (const id of v) if (typeof id === 'string') userIds.add(id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (userIds.size > 0) {
|
||||
const rows = await this.db
|
||||
.selectFrom('users')
|
||||
.select(['id', 'name', 'email'])
|
||||
.where('id', 'in', Array.from(userIds))
|
||||
.execute();
|
||||
ctx.userNames = new Map(
|
||||
rows.map((u) => [u.id, u.name || u.email || '']),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const pagePropIds = properties
|
||||
.filter((p) => p.type === BasePropertyType.PAGE)
|
||||
const userIds = new Set<string>();
|
||||
const personPropIds = properties
|
||||
.filter((p) => p.type === BasePropertyType.PERSON)
|
||||
.map((p) => p.id);
|
||||
|
||||
if (pagePropIds.length > 0) {
|
||||
const pageIds = new Set<string>();
|
||||
for (const row of chunk) {
|
||||
const cells = (row.cells ?? {}) as Record<string, unknown>;
|
||||
for (const pid of pagePropIds) {
|
||||
const v = cells[pid];
|
||||
if (typeof v === 'string' && v.length > 0) pageIds.add(v);
|
||||
for (const row of chunk) {
|
||||
if (row.lastUpdatedById) userIds.add(row.lastUpdatedById);
|
||||
const cells = (row.cells ?? {}) as Record<string, unknown>;
|
||||
for (const pid of personPropIds) {
|
||||
const v = cells[pid];
|
||||
if (typeof v === 'string') userIds.add(v);
|
||||
else if (Array.isArray(v)) {
|
||||
for (const id of v) if (typeof id === 'string') userIds.add(id);
|
||||
}
|
||||
}
|
||||
|
||||
if (pageIds.size > 0) {
|
||||
const rows = await this.db
|
||||
.selectFrom('pages')
|
||||
.select(['id', 'title'])
|
||||
.where('id', 'in', Array.from(pageIds))
|
||||
.execute();
|
||||
ctx.pageTitles = new Map(rows.map((p) => [p.id, p.title ?? '']));
|
||||
}
|
||||
}
|
||||
|
||||
return ctx;
|
||||
if (userIds.size === 0) return {};
|
||||
|
||||
const rows = await this.db
|
||||
.selectFrom('users')
|
||||
.select(['id', 'name', 'email'])
|
||||
.where('id', 'in', Array.from(userIds))
|
||||
.execute();
|
||||
|
||||
return {
|
||||
userNames: new Map(rows.map((u) => [u.id, u.name || u.email || ''])),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,63 +0,0 @@
|
||||
import { Injectable } from '@nestjs/common';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
import { jsonObjectFrom } from 'kysely/helpers/postgres';
|
||||
import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { PagePermissionRepo } from '@docmost/db/repos/page/page-permission.repo';
|
||||
|
||||
export type ResolvedPage = {
|
||||
id: string;
|
||||
slugId: string;
|
||||
title: string | null;
|
||||
icon: string | null;
|
||||
spaceId: string;
|
||||
space: { id: string; slug: string; name: string } | null;
|
||||
};
|
||||
|
||||
@Injectable()
|
||||
export class BasePageResolverService {
|
||||
constructor(
|
||||
@InjectKysely() private readonly db: KyselyDB,
|
||||
private readonly pagePermissionRepo: PagePermissionRepo,
|
||||
) {}
|
||||
|
||||
async resolvePages(
|
||||
pageIds: string[],
|
||||
workspaceId: string,
|
||||
userId: string,
|
||||
): Promise<ResolvedPage[]> {
|
||||
const unique = Array.from(new Set(pageIds));
|
||||
if (unique.length === 0) return [];
|
||||
|
||||
const rows = await this.db
|
||||
.selectFrom('pages')
|
||||
.select([
|
||||
'pages.id',
|
||||
'pages.slugId',
|
||||
'pages.title',
|
||||
'pages.icon',
|
||||
'pages.spaceId',
|
||||
])
|
||||
.select((eb) =>
|
||||
jsonObjectFrom(
|
||||
eb
|
||||
.selectFrom('spaces')
|
||||
.select(['spaces.id', 'spaces.name', 'spaces.slug'])
|
||||
.whereRef('spaces.id', '=', 'pages.spaceId'),
|
||||
).as('space'),
|
||||
)
|
||||
.where('pages.id', 'in', unique)
|
||||
.where('pages.workspaceId', '=', workspaceId)
|
||||
.where('pages.deletedAt', 'is', null)
|
||||
.execute();
|
||||
|
||||
if (rows.length === 0) return [];
|
||||
|
||||
const accessible = await this.pagePermissionRepo.filterAccessiblePageIds({
|
||||
pageIds: rows.map((r) => r.id),
|
||||
userId,
|
||||
});
|
||||
const accessibleSet = new Set(accessible);
|
||||
|
||||
return rows.filter((r) => accessibleSet.has(r.id));
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import {
|
||||
BadRequestException,
|
||||
Injectable,
|
||||
Logger,
|
||||
NotFoundException,
|
||||
} from '@nestjs/common';
|
||||
import { InjectKysely } from 'nestjs-kysely';
|
||||
@@ -9,6 +10,8 @@ import { KyselyDB } from '@docmost/db/types/kysely.types';
|
||||
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
|
||||
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
|
||||
import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
|
||||
import { BaseQueryRouter } from '../query-cache/base-query-router';
|
||||
import { BaseQueryCacheService } from '../query-cache/base-query-cache.service';
|
||||
import { CreateRowDto } from '../dto/create-row.dto';
|
||||
import {
|
||||
UpdateRowDto,
|
||||
@@ -41,15 +44,21 @@ import {
|
||||
BaseRowUpdatedEvent,
|
||||
BaseRowsDeletedEvent,
|
||||
} from '../events/base-events';
|
||||
import { EnvironmentService } from '../../../integrations/environment/environment.service';
|
||||
|
||||
@Injectable()
|
||||
export class BaseRowService {
|
||||
private readonly logger = new Logger(BaseRowService.name);
|
||||
|
||||
constructor(
|
||||
@InjectKysely() private readonly db: KyselyDB,
|
||||
private readonly baseRowRepo: BaseRowRepo,
|
||||
private readonly basePropertyRepo: BasePropertyRepo,
|
||||
private readonly baseViewRepo: BaseViewRepo,
|
||||
private readonly eventEmitter: EventEmitter2,
|
||||
private readonly queryRouter: BaseQueryRouter,
|
||||
private readonly queryCache: BaseQueryCacheService,
|
||||
private readonly env: EnvironmentService,
|
||||
) {}
|
||||
|
||||
async create(userId: string, workspaceId: string, dto: CreateRowDto) {
|
||||
@@ -190,6 +199,9 @@ export class BaseRowService {
|
||||
pagination: PaginationOptions,
|
||||
workspaceId: string,
|
||||
) {
|
||||
const debug = this.env.getBaseQueryCacheDebug();
|
||||
const tStart = debug ? Date.now() : 0;
|
||||
|
||||
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
|
||||
const schema: PropertySchema = new Map(
|
||||
properties.map((p) => [p.id, p]),
|
||||
@@ -202,7 +214,56 @@ export class BaseRowService {
|
||||
direction: s.direction,
|
||||
}));
|
||||
|
||||
return this.baseRowRepo.list({
|
||||
const tRouter = debug ? Date.now() : 0;
|
||||
const decision = await this.queryRouter.decide({
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
filter,
|
||||
sorts,
|
||||
search,
|
||||
});
|
||||
const routerMs = debug ? Date.now() - tRouter : 0;
|
||||
|
||||
let resultPath: 'cache' | 'postgres' | 'fallback' = 'postgres';
|
||||
|
||||
if (decision === 'cache') {
|
||||
try {
|
||||
const tCache = debug ? Date.now() : 0;
|
||||
const result = await this.queryCache.list(dto.baseId, workspaceId, {
|
||||
filter,
|
||||
sorts,
|
||||
search,
|
||||
schema,
|
||||
pagination,
|
||||
});
|
||||
const cacheMs = debug ? Date.now() - tCache : 0;
|
||||
resultPath = 'cache';
|
||||
if (debug) {
|
||||
console.log(
|
||||
'[cache-perf]',
|
||||
JSON.stringify({
|
||||
path: resultPath,
|
||||
baseId: dto.baseId.slice(0, 8),
|
||||
totalMs: Date.now() - tStart,
|
||||
routerMs,
|
||||
cacheMs,
|
||||
rows: result.items.length,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return result;
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
this.logger.warn(
|
||||
`Cache list failed for base ${dto.baseId}, falling back to Postgres: ${error.message}`,
|
||||
);
|
||||
if (error.stack) this.logger.warn(error.stack);
|
||||
resultPath = 'fallback';
|
||||
}
|
||||
}
|
||||
|
||||
const tPg = debug ? Date.now() : 0;
|
||||
const result = await this.baseRowRepo.list({
|
||||
baseId: dto.baseId,
|
||||
workspaceId,
|
||||
filter,
|
||||
@@ -211,6 +272,21 @@ export class BaseRowService {
|
||||
schema,
|
||||
pagination,
|
||||
});
|
||||
const pgMs = debug ? Date.now() - tPg : 0;
|
||||
if (debug) {
|
||||
console.log(
|
||||
'[cache-perf]',
|
||||
JSON.stringify({
|
||||
path: resultPath,
|
||||
baseId: dto.baseId.slice(0, 8),
|
||||
totalMs: Date.now() - tStart,
|
||||
routerMs,
|
||||
pgMs,
|
||||
rows: result.items.length,
|
||||
}),
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
async reorder(dto: ReorderRowDto, workspaceId: string, userId?: string) {
|
||||
|
||||
@@ -159,16 +159,6 @@ async function buildCtx(
|
||||
.execute();
|
||||
ctx.attachmentNames = new Map(rows.map((a) => [a.id, a.fileName]));
|
||||
}
|
||||
} else if (fromType === BasePropertyType.PAGE) {
|
||||
const ids = collectIds(chunk, propertyId);
|
||||
if (ids.size > 0) {
|
||||
const rows = await db
|
||||
.selectFrom('pages')
|
||||
.select(['id', 'title'])
|
||||
.where('id', 'in', Array.from(ids))
|
||||
.execute();
|
||||
ctx.pageTitles = new Map(rows.map((p) => [p.id, p.title ?? '']));
|
||||
}
|
||||
}
|
||||
|
||||
return ctx;
|
||||
|
||||
@@ -0,0 +1,116 @@
|
||||
import { type Kysely, sql } from 'kysely';
|
||||
|
||||
export async function up(db: Kysely<any>): Promise<void> {
|
||||
// These functions previously used plpgsql + EXCEPTION blocks to catch bad
|
||||
// casts. EXCEPTION blocks require subtransactions, which Postgres cannot
|
||||
// use in parallel workers. The functions were marked PARALLEL SAFE but
|
||||
// aren't actually parallel-safe. DuckDB's postgres extension triggers
|
||||
// parallel COPY scans and fails on any row that invokes these.
|
||||
//
|
||||
// Rewrite each as a pure SQL function using jsonb_typeof + regex
|
||||
// validation to achieve the same "coerce-or-null" semantics without
|
||||
// plpgsql. SQL functions with no volatile side effects are genuinely
|
||||
// parallel-safe.
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_numeric(cells jsonb, prop uuid)
|
||||
RETURNS numeric
|
||||
LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
SELECT CASE jsonb_typeof(cells -> prop::text)
|
||||
WHEN 'number' THEN (cells->>prop::text)::numeric
|
||||
WHEN 'string' THEN
|
||||
CASE WHEN (cells->>prop::text) ~ '^\\s*-?\\d+(\\.\\d+)?([eE][+-]?\\d+)?\\s*$'
|
||||
THEN (cells->>prop::text)::numeric
|
||||
ELSE NULL END
|
||||
ELSE NULL
|
||||
END
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_timestamptz(cells jsonb, prop uuid)
|
||||
RETURNS timestamptz
|
||||
LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
SELECT CASE
|
||||
WHEN jsonb_typeof(cells -> prop::text) = 'string'
|
||||
AND (cells->>prop::text) ~ '^\\d{4}-\\d{2}-\\d{2}([ T]\\d{2}:\\d{2}(:\\d{2}(\\.\\d+)?)?([+-]\\d{2}(:?\\d{2})?|Z)?)?$'
|
||||
THEN (cells->>prop::text)::timestamptz
|
||||
ELSE NULL
|
||||
END
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_bool(cells jsonb, prop uuid)
|
||||
RETURNS boolean
|
||||
LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
SELECT CASE jsonb_typeof(cells -> prop::text)
|
||||
WHEN 'boolean' THEN (cells->>prop::text)::boolean
|
||||
WHEN 'string' THEN
|
||||
CASE lower(cells->>prop::text)
|
||||
WHEN 'true' THEN true
|
||||
WHEN 't' THEN true
|
||||
WHEN 'yes' THEN true
|
||||
WHEN 'y' THEN true
|
||||
WHEN '1' THEN true
|
||||
WHEN 'false' THEN false
|
||||
WHEN 'f' THEN false
|
||||
WHEN 'no' THEN false
|
||||
WHEN 'n' THEN false
|
||||
WHEN '0' THEN false
|
||||
ELSE NULL
|
||||
END
|
||||
ELSE NULL
|
||||
END
|
||||
$$
|
||||
`.execute(db);
|
||||
}
|
||||
|
||||
export async function down(db: Kysely<any>): Promise<void> {
|
||||
// Restore the previous plpgsql + EXCEPTION versions. Same PARALLEL SAFE
|
||||
// labels — they were broken before, they'll still be broken after rollback,
|
||||
// but rollback means you're going back to the prior bug, not inventing a
// new one.
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_numeric(cells jsonb, prop uuid)
|
||||
RETURNS numeric
|
||||
LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN (cells->>prop::text)::numeric;
|
||||
EXCEPTION WHEN others THEN
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_timestamptz(cells jsonb, prop uuid)
|
||||
RETURNS timestamptz
|
||||
LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN (cells->>prop::text)::timestamptz;
|
||||
EXCEPTION WHEN others THEN
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
|
||||
await sql`
|
||||
CREATE OR REPLACE FUNCTION base_cell_bool(cells jsonb, prop uuid)
|
||||
RETURNS boolean
|
||||
LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN (cells->>prop::text)::boolean;
|
||||
EXCEPTION WHEN others THEN
|
||||
RETURN NULL;
|
||||
END;
|
||||
$$
|
||||
`.execute(db);
|
||||
}
|
||||
@@ -128,6 +128,21 @@ export class BaseRowRepo
    });
  }

  async countActiveRows(
    baseId: string,
    opts: WorkspaceOpts,
  ): Promise<number> {
    const db = dbOrTx(this.db, opts.trx);
    const row = await db
      .selectFrom('baseRows')
      .select((eb) => eb.fn.countAll<number>().as('count'))
      .where('baseId', '=', baseId)
      .where('workspaceId', '=', opts.workspaceId)
      .where('deletedAt', 'is', null)
      .executeTakeFirst();
    return Number(row?.count ?? 0);
  }

  async getLastPosition(
    baseId: string,
    opts: WorkspaceOpts,
|
||||
|
||||
@@ -304,4 +304,101 @@ export class EnvironmentService {
|
||||
getClickHouseUrl(): string {
|
||||
return this.configService.get<string>('CLICKHOUSE_URL');
|
||||
}
|
||||
|
||||
getBaseQueryCacheEnabled(): boolean {
|
||||
const enabled = this.configService
|
||||
.get<string>('BASE_QUERY_CACHE_ENABLED', 'false')
|
||||
.toLowerCase();
|
||||
return enabled === 'true';
|
||||
}
|
||||
|
||||
getBaseQueryCacheMinRows(): number {
|
||||
return parseInt(
|
||||
this.configService.get<string>('BASE_QUERY_CACHE_MIN_ROWS', '25000'),
|
||||
10,
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheMaxCollections(): number {
|
||||
// Default is intentionally low (50) because a single-node self-host with
|
||||
// ~100 MB per collection can pin ~5 GB RSS at the cap. SaaS/larger
|
||||
// deployments can raise via env.
|
||||
return parseInt(
|
||||
this.configService.get<string>('BASE_QUERY_CACHE_MAX_COLLECTIONS', '50'),
|
||||
10,
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheWarmTopN(): number {
|
||||
return parseInt(
|
||||
this.configService.get<string>('BASE_QUERY_CACHE_WARM_TOP_N', '50'),
|
||||
10,
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheDebug(): boolean {
|
||||
return (
|
||||
this.configService
|
||||
.get<string>('BASE_QUERY_CACHE_DEBUG', 'false')
|
||||
.toLowerCase() === 'true'
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheTrace(): boolean {
|
||||
return (
|
||||
this.configService
|
||||
.get<string>('BASE_QUERY_CACHE_TRACE', 'false')
|
||||
.toLowerCase() === 'true'
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheMemoryLimit(): string {
|
||||
// Per-DuckDB-instance memory ceiling. DuckDB accepts human-readable sizes:
|
||||
// '256MB', '1GB', etc. Default 512MB is sized for bases up to ~300K rows
|
||||
// with moderate schemas without spilling. DuckDB automatically spills
|
||||
// to `temp_directory` when this is exceeded, so over-allocating is
|
||||
// cheap — the risk is under-sizing.
|
||||
return this.configService.get<string>(
|
||||
'BASE_QUERY_CACHE_MEMORY_LIMIT',
|
||||
'512MB',
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheTempDirectory(): string {
|
||||
// Directory DuckDB uses to spill pages when an instance exceeds its
|
||||
// memory_limit. Defaults to the system temp dir plus a namespace so
|
||||
// different processes don't collide. Setting this explicitly is what
|
||||
// enables spill-to-disk on `:memory:` instances — without it, DuckDB
|
||||
// OOMs at memory_limit instead of paging.
|
||||
const defaultPath = `${require('node:os').tmpdir()}/docmost-duckdb-cache`;
|
||||
return this.configService.get<string>(
|
||||
'BASE_QUERY_CACHE_TEMP_DIR',
|
||||
defaultPath,
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheThreads(): number {
|
||||
// Per-DuckDB-instance thread budget. Defaults to 2 so multiple concurrent
|
||||
// instances don't fight for every core on a shared host.
|
||||
return parseInt(
|
||||
this.configService.get<string>('BASE_QUERY_CACHE_THREADS', '2'),
|
||||
10,
|
||||
);
|
||||
}
|
||||
|
||||
getBaseQueryCacheReaderPoolSize(): number {
|
||||
// Number of reader connections held open against the shared DuckDB
|
||||
// instance. Reads are dispatched via `withReader()` which checks out a
|
||||
// connection, runs the query, returns it. Bigger pool = more concurrent
|
||||
// reads without serialization, at the cost of per-connection overhead
|
||||
// (each connection carries its own catalog snapshot + prepared-statement
|
||||
// cache ~= 300 KB).
|
||||
//
|
||||
// Default 4 matches libuv's default thread-pool size. Raise to 8+ if
|
||||
// you see p99 list latency correlate with concurrent request volume.
|
||||
return parseInt(
|
||||
this.configService.get<string>('BASE_QUERY_CACHE_READER_POOL_SIZE', '4'),
|
||||
10,
|
||||
);
|
||||
}
|
||||
}
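// A minimal sketch of the `withReader()` check-out/run/return pattern the
// reader-pool comment above describes (assumption: names and error handling
// in the real DuckDbRuntime differ; `pool` is a generic async pool here):
//
//   async withReader<T>(fn: (conn: DuckDBConnection) => Promise<T>): Promise<T> {
//     const conn = await this.pool.acquire(); // waits when all readers are busy
//     try {
//       return await fn(conn);
//     } finally {
//       this.pool.release(conn);              // always hand the reader back
//     }
//   }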
|
||||
|
||||
@@ -3,11 +3,9 @@ import * as dotenv from 'dotenv';
|
||||
import { Kysely } from 'kysely';
|
||||
import { PostgresJSDialect } from 'kysely-postgres-js';
|
||||
import postgres from 'postgres';
|
||||
import { v7 as uuid7 } from 'uuid';
|
||||
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
|
||||
import { seedBase } from '../core/base/query-cache/testing/seed-base';
|
||||
|
||||
const TOTAL_ROWS = Number(process.env.TOTAL_ROWS) || 1500;
|
||||
const BATCH_SIZE = 2000;
|
||||
|
||||
const envFilePath = path.resolve(process.cwd(), '..', '..', '.env');
|
||||
dotenv.config({ path: envFilePath });
|
||||
@@ -30,206 +28,6 @@ const db = new Kysely<any>({
}),
});

const SKIP_TYPES = new Set([
'createdAt',
'lastEditedAt',
'lastEditedBy',
'person',
'file',
]);

const WORDS = [
'Alpha', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot', 'Golf',
'Hotel', 'India', 'Juliet', 'Kilo', 'Lima', 'Mike', 'November',
'Oscar', 'Papa', 'Quebec', 'Romeo', 'Sierra', 'Tango', 'Uniform',
'Victor', 'Whiskey', 'X-ray', 'Yankee', 'Zulu', 'Report', 'Analysis',
'Summary', 'Review', 'Update', 'Draft', 'Final', 'Proposal', 'Budget',
'Timeline', 'Milestone', 'Objective', 'Strategy', 'Initiative',
];

const COLORS = [
'red', 'orange', 'yellow', 'green', 'blue', 'purple', 'pink', 'gray',
];

function randomWords(min: number, max: number): string {
const count = min + Math.floor(Math.random() * (max - min + 1));
const result: string[] = [];
for (let i = 0; i < count; i++) {
result.push(WORDS[Math.floor(Math.random() * WORDS.length)]);
}
return result.join(' ');
}

function makeChoices(names: string[], category?: string) {
return names.map((name, i) => ({
id: uuid7(),
name,
color: COLORS[i % COLORS.length],
...(category ? {} : {}),
}));
}

function makeStatusChoices() {
const todo = [{ id: uuid7(), name: 'Not Started', color: 'gray', category: 'todo' }];
const inProgress = [
{ id: uuid7(), name: 'In Progress', color: 'blue', category: 'inProgress' },
{ id: uuid7(), name: 'In Review', color: 'purple', category: 'inProgress' },
];
const complete = [
{ id: uuid7(), name: 'Done', color: 'green', category: 'complete' },
{ id: uuid7(), name: 'Cancelled', color: 'red', category: 'complete' },
];
const all = [...todo, ...inProgress, ...complete];
return { choices: all, choiceOrder: all.map((c) => c.id) };
}

type PropertyDef = {
name: string;
type: string;
isPrimary?: boolean;
typeOptions?: any;
};

function buildPropertyDefinitions(): PropertyDef[] {
const priorityChoices = makeChoices(['Low', 'Medium', 'High', 'Critical']);
const categoryChoices = makeChoices(['Engineering', 'Design', 'Marketing', 'Sales', 'Support', 'Operations']);
const tagChoices = makeChoices(['Bug', 'Feature', 'Improvement', 'Documentation', 'Research']);
const statusOpts = makeStatusChoices();

return [
{ name: 'Title', type: 'text', isPrimary: true },
{ name: 'Status', type: 'status', typeOptions: statusOpts },
{ name: 'Priority', type: 'select', typeOptions: { choices: priorityChoices, choiceOrder: priorityChoices.map((c) => c.id) } },
{ name: 'Category', type: 'select', typeOptions: { choices: categoryChoices, choiceOrder: categoryChoices.map((c) => c.id) } },
{ name: 'Tags', type: 'multiSelect', typeOptions: { choices: tagChoices, choiceOrder: tagChoices.map((c) => c.id) } },
{ name: 'Due Date', type: 'date', typeOptions: { dateFormat: 'YYYY-MM-DD', includeTime: false } },
{ name: 'Estimate', type: 'number', typeOptions: { format: 'plain', precision: 1 } },
{ name: 'Budget', type: 'number', typeOptions: { format: 'currency', precision: 2, currencySymbol: '$' } },
{ name: 'Approved', type: 'checkbox' },
{ name: 'Website', type: 'url' },
{ name: 'Contact Email', type: 'email' },
{ name: 'Notes', type: 'text' },
{ name: 'Created', type: 'createdAt' },
{ name: 'Last Edited', type: 'lastEditedAt' },
];
}

type CellGenerator = () => unknown;

function buildCellGenerator(property: any): CellGenerator | null {
if (SKIP_TYPES.has(property.type)) return null;

const typeOptions = property.type_options;

switch (property.type) {
case 'text':
return () => randomWords(2, 6);

case 'number':
return () => Math.round(Math.random() * 10000 * 100) / 100;

case 'select':
case 'status': {
const choices = typeOptions?.choices ?? [];
if (choices.length === 0) return null;
return () => choices[Math.floor(Math.random() * choices.length)].id;
}

case 'multiSelect': {
const choices = typeOptions?.choices ?? [];
if (choices.length === 0) return () => [];
return () => {
const count = 1 + Math.floor(Math.random() * Math.min(3, choices.length));
const shuffled = [...choices].sort(() => Math.random() - 0.5);
return shuffled.slice(0, count).map((c: any) => c.id);
};
}

case 'date': {
const start = new Date(2020, 0, 1).getTime();
const range = new Date(2026, 0, 1).getTime() - start;
return () => new Date(start + Math.random() * range).toISOString();
}

case 'checkbox':
return () => Math.random() > 0.5;

case 'url':
return () => `https://example.com/page/${Math.floor(Math.random() * 100000)}`;

case 'email':
return () => `user${Math.floor(Math.random() * 100000)}@example.com`;

default:
return null;
}
}

async function createBase(workspaceId: string, spaceId: string, creatorId: string | null): Promise<string> {
const baseId = uuid7();
const rowCountLabel = TOTAL_ROWS >= 1000 ? `${Math.round(TOTAL_ROWS / 1000)}K` : `${TOTAL_ROWS}`;
const baseName = `Seed Base ${rowCountLabel} rows`;

await db.insertInto('bases').values({
id: baseId,
name: baseName,
space_id: spaceId,
workspace_id: workspaceId,
creator_id: creatorId,
created_at: new Date(),
updated_at: new Date(),
}).execute();

console.log(`Created base: ${baseName}`);
console.log(`Base ID: ${baseId}\n`);

// Create properties
const propertyDefs = buildPropertyDefinitions();
let propPosition: string | null = null;
const insertedProperties: any[] = [];

for (const def of propertyDefs) {
propPosition = generateJitteredKeyBetween(propPosition, null);
const prop = {
id: uuid7(),
base_id: baseId,
name: def.name,
type: def.type,
position: propPosition,
type_options: def.typeOptions ?? null,
is_primary: def.isPrimary ?? false,
workspace_id: workspaceId,
created_at: new Date(),
updated_at: new Date(),
};
insertedProperties.push(prop);
}

await db.insertInto('base_properties').values(insertedProperties).execute();
console.log(`Created ${insertedProperties.length} properties:`);
for (const p of insertedProperties) {
console.log(` - ${p.name} (${p.type})${p.is_primary ? ' [primary]' : ''}${SKIP_TYPES.has(p.type) ? ' [system]' : ''}`);
}

// Create default view
const viewId = uuid7();
await db.insertInto('base_views').values({
id: viewId,
base_id: baseId,
name: 'Table View 1',
type: 'table',
position: generateJitteredKeyBetween(null, null),
config: {},
workspace_id: workspaceId,
creator_id: creatorId,
created_at: new Date(),
updated_at: new Date(),
}).execute();
console.log(`Created view: Table View 1\n`);

return baseId;
}

async function main() {
const spaceId = '019c69a3-dd47-7014-8b87-ec8f167577ee';

@@ -247,75 +45,26 @@ async function main() {
.limit(1)
.executeTakeFirst();

const creatorId = user?.id ?? null;
const creatorUserId = user?.id ?? null;

console.log(`Workspace: ${workspaceId}`);
console.log(`Space: ${spaceId}`);
console.log(`Creator: ${creatorId ?? '(none)'}\n`);

// Create the base with properties and view
const baseId = await createBase(workspaceId, spaceId, creatorId);

// Load the created properties for cell generation
const properties = await db
.selectFrom('base_properties')
.selectAll()
.where('base_id', '=', baseId)
.execute();

const generators: Array<{ propertyId: string; generate: CellGenerator }> = [];
for (const prop of properties) {
const gen = buildCellGenerator(prop);
if (gen) {
generators.push({ propertyId: prop.id, generate: gen });
}
}

console.log(`Generating ${TOTAL_ROWS.toLocaleString()} positions...`);

let lastPosition: string | null = null;
const positions: string[] = new Array(TOTAL_ROWS);
for (let i = 0; i < TOTAL_ROWS; i++) {
lastPosition = generateJitteredKeyBetween(lastPosition, null);
positions[i] = lastPosition;
}
console.log(`Positions generated (last: ${positions[positions.length - 1]})\n`);
console.log(`Creator: ${creatorUserId ?? '(none)'}\n`);

const startTime = Date.now();
const totalBatches = Math.ceil(TOTAL_ROWS / BATCH_SIZE);

for (let batchStart = 0; batchStart < TOTAL_ROWS; batchStart += BATCH_SIZE) {
const batchEnd = Math.min(batchStart + BATCH_SIZE, TOTAL_ROWS);
const rows: any[] = [];

for (let i = batchStart; i < batchEnd; i++) {
const cells: Record<string, unknown> = {};
for (const { propertyId, generate } of generators) {
cells[propertyId] = generate();
}

rows.push({
id: uuid7(),
base_id: baseId,
cells,
position: positions[i],
creator_id: creatorId,
workspace_id: workspaceId,
created_at: new Date(),
updated_at: new Date(),
});
}

await db.insertInto('base_rows').values(rows).execute();

const batchNum = Math.floor(batchStart / BATCH_SIZE) + 1;
const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`Batch ${batchNum}/${totalBatches} inserted (${batchEnd.toLocaleString()} rows, ${elapsed}s elapsed)`);
}

const { baseId } = await seedBase({
db,
workspaceId,
spaceId,
creatorUserId,
rows: TOTAL_ROWS,
});
const totalElapsed = ((Date.now() - startTime) / 1000).toFixed(1);
console.log(`\nDone. Inserted ${TOTAL_ROWS.toLocaleString()} rows in ${totalElapsed}s`);
console.log(`\nBase ID: ${baseId}`);

console.log(
`Inserted ${TOTAL_ROWS.toLocaleString()} rows in ${totalElapsed}s`,
);
console.log(`Base ID: ${baseId}`);

await db.destroy();
process.exit(0);

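For reference, a minimal sketch of how the extracted helper is invoked, based solely on the call site shown in the hunk above (option names `db`, `workspaceId`, `spaceId`, `creatorUserId`, `rows` and the returned `baseId` come from the diff). The wrapper function name and the re-read of `TOTAL_ROWS` here are illustrative assumptions, not part of this change.

```ts
import { Kysely } from 'kysely';
import { seedBase } from '../core/base/query-cache/testing/seed-base';

// Hypothetical standalone wrapper around the seedBase call site above.
async function runSeed(
  db: Kysely<any>,
  workspaceId: string,
  spaceId: string,
  creatorUserId: string | null,
): Promise<string> {
  // Same default as the seed script: TOTAL_ROWS env var, falling back to 1500.
  const rows = Number(process.env.TOTAL_ROWS) || 1500;
  const { baseId } = await seedBase({ db, workspaceId, spaceId, creatorUserId, rows });
  return baseId;
}
```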
Generated lockfile diff (+71 lines): file diff suppressed because it is too large; the loaded excerpt follows.
@@ -496,6 +496,9 @@ importers:
'@clickhouse/client':
specifier: ^1.18.2
version: 1.18.2
'@duckdb/node-api':
specifier: 1.5.2-r.1
version: 1.5.2-r.1
'@fastify/cookie':
specifier: ^11.0.2
version: 11.0.2
@@ -1852,6 +1855,42 @@ packages:
peerDependencies:
react: '>=16.8.0'

'@duckdb/node-api@1.5.2-r.1':
resolution: {integrity: sha512-OzBBnS0JGXMoS5mzKNY/Ylr7SshcRQiLFIoxQ4AlePwJ2fNeDL/fbHu/knjxUrXwW1fJBTUgwWftmxDdnZZb3A==}

'@duckdb/node-bindings-darwin-arm64@1.5.2-r.1':
resolution: {integrity: sha512-v35FyKOb8EJCvaiPF7k0gvKiJTXR7PPQDNoWR0Gu+YSX5O9b+DIguzt1348Of3HebHy6ATSMzlUekaVA9YXu+g==}
cpu: [arm64]
os: [darwin]

'@duckdb/node-bindings-darwin-x64@1.5.2-r.1':
resolution: {integrity: sha512-SU9dIJ1BluKkkGxi4UsP4keqkkstB2YDySF9KcYu3EZKIVM3FTv2zc7XO38dXnHOq6+F3WqhWWZvD+XU945p7A==}
cpu: [x64]
os: [darwin]

'@duckdb/node-bindings-linux-arm64@1.5.2-r.1':
resolution: {integrity: sha512-3Tra9xM3aM3denaER4KhJ6//6PpmPbik9ECBQ+sh9PyKaEgHw/0kAcKnLm5EzWUnXF0qYmZlewvkCrse8KmOYw==}
cpu: [arm64]
os: [linux]

'@duckdb/node-bindings-linux-x64@1.5.2-r.1':
resolution: {integrity: sha512-pcQvZRHiIfJ9cq8parkSQczQHEml/IeGfnDCMAbEgD6+jaV9Y9Y5Ph1kP9aR+bm6him1S5ZIEr3kZbihjKnWbA==}
cpu: [x64]
os: [linux]

'@duckdb/node-bindings-win32-arm64@1.5.2-r.1':
resolution: {integrity: sha512-Ji8tym+N3LkrhVt0Up3bsacD/kpg4/JXFJQqxswiYvBaNCQOk+D+aiVS0GN5pcqvmnG7V7TpsDRzkLEFaWp1vw==}
cpu: [arm64]
os: [win32]

'@duckdb/node-bindings-win32-x64@1.5.2-r.1':
resolution: {integrity: sha512-5XqcqC+4R8ghBEEbnc2a0sqfz1zyPBRb9YcmIWfiuDoCYSYFbKhmHcEyNftZDHcwCoLOHXnUin45jraex4STqQ==}
cpu: [x64]
os: [win32]

'@duckdb/node-bindings@1.5.2-r.1':
resolution: {integrity: sha512-bUg3bLVj70YVku6fKyQJS8ASORl7kM7YFVFznsEB9pWbtazPj+ME2x2FUk0WiTzjJdutjzSSGXF066mB4bGGZA==}

'@emnapi/core@1.8.1':
resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==}

@@ -4040,6 +4079,7 @@ packages:
'@react-email/components@1.0.10':
resolution: {integrity: sha512-r/BnqfAjr3apcvn/NDx2DqNRD5BP5wZLRdjn2IVHXjt4KmQ5RHWSCAvFiXAzRHys1BWQ2zgIc7cpWePUcAl+nw==}
engines: {node: '>=20.0.0'}
deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info.
peerDependencies:
react: ^18.0 || ^19.0 || ^19.0.0-rc

@@ -12265,6 +12305,37 @@ snapshots:
react: 18.3.1
tslib: 2.8.1

'@duckdb/node-api@1.5.2-r.1':
dependencies:
'@duckdb/node-bindings': 1.5.2-r.1

'@duckdb/node-bindings-darwin-arm64@1.5.2-r.1':
optional: true

'@duckdb/node-bindings-darwin-x64@1.5.2-r.1':
optional: true

'@duckdb/node-bindings-linux-arm64@1.5.2-r.1':
optional: true

'@duckdb/node-bindings-linux-x64@1.5.2-r.1':
optional: true

'@duckdb/node-bindings-win32-arm64@1.5.2-r.1':
optional: true

'@duckdb/node-bindings-win32-x64@1.5.2-r.1':
optional: true

'@duckdb/node-bindings@1.5.2-r.1':
optionalDependencies:
'@duckdb/node-bindings-darwin-arm64': 1.5.2-r.1
'@duckdb/node-bindings-darwin-x64': 1.5.2-r.1
'@duckdb/node-bindings-linux-arm64': 1.5.2-r.1
'@duckdb/node-bindings-linux-x64': 1.5.2-r.1
'@duckdb/node-bindings-win32-arm64': 1.5.2-r.1
'@duckdb/node-bindings-win32-x64': 1.5.2-r.1

'@emnapi/core@1.8.1':
dependencies:
'@emnapi/wasi-threads': 1.1.0