This commit is contained in:
Philipinho
2026-04-18 13:13:53 +01:00
parent 081bb67239
commit f5b19316af
53 changed files with 4057 additions and 813 deletions
@@ -6,6 +6,7 @@ import { useTranslation } from "react-i18next";
import { arrayMove } from "@dnd-kit/sortable"; import { arrayMove } from "@dnd-kit/sortable";
import { generateJitteredKeyBetween } from "fractional-indexing-jittered"; import { generateJitteredKeyBetween } from "fractional-indexing-jittered";
import { useBaseQuery } from "@/features/base/queries/base-query"; import { useBaseQuery } from "@/features/base/queries/base-query";
import { useBaseSocket } from "@/features/base/hooks/use-base-socket";
import { import {
useBaseRowsQuery, useBaseRowsQuery,
flattenRows, flattenRows,
@@ -26,6 +27,9 @@ type BaseTableProps = {
export function BaseTable({ baseId }: BaseTableProps) { export function BaseTable({ baseId }: BaseTableProps) {
const { t } = useTranslation(); const { t } = useTranslation();
// Subscribe to the base's realtime room so other clients' edits,
// schema changes, and async-job completions reconcile into our cache.
useBaseSocket(baseId);
const { data: base, isLoading: baseLoading, error: baseError } = useBaseQuery(baseId); const { data: base, isLoading: baseLoading, error: baseError } = useBaseQuery(baseId);
const [activeViewId, setActiveViewId] = useAtom(activeViewIdAtom) as unknown as [string | null, (val: string | null) => void]; const [activeViewId, setActiveViewId] = useAtom(activeViewIdAtom) as unknown as [string | null, (val: string | null) => void];
@@ -36,10 +40,10 @@ export function BaseTable({ baseId }: BaseTableProps) {
return views.find((v) => v.id === activeViewId) ?? views[0]; return views.find((v) => v.id === activeViewId) ?? views[0];
}, [views, activeViewId]); }, [views, activeViewId]);
const activeFilters = activeView?.config?.filters; const activeFilter = activeView?.config?.filter;
const activeSorts = activeView?.config?.sorts; const activeSorts = activeView?.config?.sorts;
const { data: rowsData, isLoading: rowsLoading, fetchNextPage, hasNextPage, isFetchingNextPage } = const { data: rowsData, isLoading: rowsLoading, fetchNextPage, hasNextPage, isFetchingNextPage } =
useBaseRowsQuery(baseId, activeFilters, activeSorts); useBaseRowsQuery(baseId, activeFilter, activeSorts);
const updateRowMutation = useUpdateRowMutation(); const updateRowMutation = useUpdateRowMutation();
const createRowMutation = useCreateRowMutation(); const createRowMutation = useCreateRowMutation();
@@ -11,7 +11,8 @@ import {
IBaseRow, IBaseRow,
IBaseView, IBaseView,
ViewSortConfig, ViewSortConfig,
ViewFilterConfig, FilterCondition,
FilterGroup,
} from "@/features/base/types/base.types"; } from "@/features/base/types/base.types";
import { useUpdateViewMutation } from "@/features/base/queries/base-view-query"; import { useUpdateViewMutation } from "@/features/base/queries/base-view-query";
import { ViewTabs } from "@/features/base/components/views/view-tabs"; import { ViewTabs } from "@/features/base/components/views/view-tabs";
@@ -54,7 +55,16 @@ export function BaseToolbar({
const updateViewMutation = useUpdateViewMutation(); const updateViewMutation = useUpdateViewMutation();
const sorts = activeView?.config?.sorts ?? []; const sorts = activeView?.config?.sorts ?? [];
const filters = activeView?.config?.filters ?? []; // Stored view config uses the engine's filter tree. The popover edits
// an AND-only flat list; we unwrap the top-level group's children when
// reading and rewrap on save.
const conditions = useMemo<FilterCondition[]>(() => {
const filter = activeView?.config?.filter;
if (!filter || filter.op !== "and") return [];
return filter.children.filter(
(c): c is FilterCondition => !("children" in c),
);
}, [activeView?.config?.filter]);
const hiddenFieldCount = useMemo(() => { const hiddenFieldCount = useMemo(() => {
const cols = table.getAllLeafColumns().filter((col) => col.id !== "__row_number"); const cols = table.getAllLeafColumns().filter((col) => col.id !== "__row_number");
@@ -74,12 +84,17 @@ export function BaseToolbar({
); );
const handleFiltersChange = useCallback( const handleFiltersChange = useCallback(
(newFilters: ViewFilterConfig[]) => { (newConditions: FilterCondition[]) => {
if (!activeView) return; if (!activeView) return;
const filter: FilterGroup | undefined =
newConditions.length > 0
? { op: "and", children: newConditions }
: undefined;
const { filter: _drop, ...rest } = activeView.config ?? {};
updateViewMutation.mutate({ updateViewMutation.mutate({
viewId: activeView.id, viewId: activeView.id,
baseId: base.id, baseId: base.id,
config: { ...activeView.config, filters: newFilters }, config: filter ? { ...rest, filter } : rest,
}); });
}, },
[activeView, base.id, updateViewMutation], [activeView, base.id, updateViewMutation],
@@ -99,7 +114,7 @@ export function BaseToolbar({
<ViewFilterConfigPopover <ViewFilterConfigPopover
opened={filterOpened} opened={filterOpened}
onClose={() => setFilterOpened(false)} onClose={() => setFilterOpened(false)}
filters={filters} conditions={conditions}
properties={base.properties} properties={base.properties}
onChange={handleFiltersChange} onChange={handleFiltersChange}
> >
@@ -107,11 +122,11 @@ export function BaseToolbar({
<ActionIcon <ActionIcon
variant="subtle" variant="subtle"
size="sm" size="sm"
color={filters.length > 0 ? "blue" : "gray"} color={conditions.length > 0 ? "blue" : "gray"}
onClick={() => openToolbar("filter")} onClick={() => openToolbar("filter")}
> >
<IconFilter size={16} /> <IconFilter size={16} />
{filters.length > 0 && ( {conditions.length > 0 && (
<Badge <Badge
size="xs" size="xs"
circle circle
@@ -127,7 +142,7 @@ export function BaseToolbar({
fontSize: 9, fontSize: 9,
}} }}
> >
{filters.length} {conditions.length}
</Badge> </Badge>
)} )}
</ActionIcon> </ActionIcon>
@@ -2,7 +2,8 @@ import { memo, useCallback, useRef } from "react";
import { Header, flexRender } from "@tanstack/react-table"; import { Header, flexRender } from "@tanstack/react-table";
import { useSortable } from "@dnd-kit/sortable"; import { useSortable } from "@dnd-kit/sortable";
import { CSS } from "@dnd-kit/utilities"; import { CSS } from "@dnd-kit/utilities";
import { Popover } from "@mantine/core"; import { Loader, Popover, Tooltip } from "@mantine/core";
import { useTranslation } from "react-i18next";
import { useAtom } from "jotai"; import { useAtom } from "jotai";
import { IBaseRow, IBaseProperty, EditingCell } from "@/features/base/types/base.types"; import { IBaseRow, IBaseProperty, EditingCell } from "@/features/base/types/base.types";
import { activePropertyMenuAtom, propertyMenuDirtyAtom, editingCellAtom } from "@/features/base/atoms/base-atoms"; import { activePropertyMenuAtom, propertyMenuDirtyAtom, editingCellAtom } from "@/features/base/atoms/base-atoms";
@@ -49,12 +50,14 @@ type GridHeaderCellProps = {
export const GridHeaderCell = memo(function GridHeaderCell({ export const GridHeaderCell = memo(function GridHeaderCell({
header, header,
}: GridHeaderCellProps) { }: GridHeaderCellProps) {
const { t } = useTranslation();
const property = header.column.columnDef.meta?.property as const property = header.column.columnDef.meta?.property as
| IBaseProperty | IBaseProperty
| undefined; | undefined;
const isRowNumber = header.column.id === "__row_number"; const isRowNumber = header.column.id === "__row_number";
const isPinned = header.column.getIsPinned(); const isPinned = header.column.getIsPinned();
const pinOffset = isPinned ? header.column.getStart("left") : undefined; const pinOffset = isPinned ? header.column.getStart("left") : undefined;
const isConverting = !!property?.pendingType;
const [activePropertyMenu, setActivePropertyMenu] = useAtom(activePropertyMenuAtom) as unknown as [string | null, (val: string | null) => void]; const [activePropertyMenu, setActivePropertyMenu] = useAtom(activePropertyMenuAtom) as unknown as [string | null, (val: string | null) => void];
const menuOpened = activePropertyMenu === header.column.id; const menuOpened = activePropertyMenu === header.column.id;
@@ -138,6 +141,20 @@ export const GridHeaderCell = memo(function GridHeaderCell({
<span className={classes.headerCellName}> <span className={classes.headerCellName}>
{flexRender(header.column.columnDef.header, header.getContext())} {flexRender(header.column.columnDef.header, header.getContext())}
</span> </span>
{isConverting && (
<Tooltip
label={t("Converting to {{type}}…", {
type: property?.pendingType,
})}
withArrow
>
<Loader
size={12}
color="gray"
className={classes.headerConvertingSpinner}
/>
</Tooltip>
)}
</div> </div>
)} )}
{header.column.getCanResize() && ( {header.column.getCanResize() && (
@@ -13,61 +13,69 @@ import { IconPlus, IconTrash } from "@tabler/icons-react";
import { import {
IBaseProperty, IBaseProperty,
SelectTypeOptions, SelectTypeOptions,
ViewFilterConfig, FilterCondition,
ViewFilterOperator, FilterOperator,
} from "@/features/base/types/base.types"; } from "@/features/base/types/base.types";
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
const OPERATORS: { value: ViewFilterOperator; labelKey: string }[] = [ /*
{ value: "equals", labelKey: "Equals" }, * Operator metadata for the filter popover. Values use the server
{ value: "notEquals", labelKey: "Not equals" }, * engine's operator set (`core/base/engine/schema.zod.ts`); labels are
* i18n-translated display strings.
*/
const OPERATORS: { value: FilterOperator; labelKey: string }[] = [
{ value: "eq", labelKey: "Equals" },
{ value: "neq", labelKey: "Not equals" },
{ value: "contains", labelKey: "Contains" }, { value: "contains", labelKey: "Contains" },
{ value: "notContains", labelKey: "Not contains" }, { value: "ncontains", labelKey: "Not contains" },
{ value: "isEmpty", labelKey: "Is empty" }, { value: "isEmpty", labelKey: "Is empty" },
{ value: "isNotEmpty", labelKey: "Is not empty" }, { value: "isNotEmpty", labelKey: "Is not empty" },
{ value: "greaterThan", labelKey: "Greater than" }, { value: "gt", labelKey: "Greater than" },
{ value: "lessThan", labelKey: "Less than" }, { value: "lt", labelKey: "Less than" },
{ value: "before", labelKey: "Before" }, { value: "before", labelKey: "Before" },
{ value: "after", labelKey: "After" }, { value: "after", labelKey: "After" },
{ value: "any", labelKey: "Any of" },
{ value: "none", labelKey: "None of" },
]; ];
const NO_VALUE_OPERATORS: ViewFilterOperator[] = ["isEmpty", "isNotEmpty"]; const NO_VALUE_OPERATORS: FilterOperator[] = ["isEmpty", "isNotEmpty"];
function getOperatorsForType(type: string): ViewFilterOperator[] { function getOperatorsForType(type: string): FilterOperator[] {
switch (type) { switch (type) {
case "text": case "text":
case "email": case "email":
case "url": case "url":
return ["equals", "notEquals", "contains", "notContains", "isEmpty", "isNotEmpty"]; return ["eq", "neq", "contains", "ncontains", "isEmpty", "isNotEmpty"];
case "number": case "number":
return ["equals", "notEquals", "greaterThan", "lessThan", "isEmpty", "isNotEmpty"]; return ["eq", "neq", "gt", "lt", "isEmpty", "isNotEmpty"];
case "date": case "date":
case "createdAt": case "createdAt":
case "lastEditedAt": case "lastEditedAt":
return ["equals", "notEquals", "before", "after", "isEmpty", "isNotEmpty"]; return ["eq", "neq", "before", "after", "isEmpty", "isNotEmpty"];
case "select": case "select":
case "status": case "status":
return ["eq", "neq", "any", "none", "isEmpty", "isNotEmpty"];
case "multiSelect": case "multiSelect":
return ["equals", "notEquals", "isEmpty", "isNotEmpty"]; return ["any", "none", "isEmpty", "isNotEmpty"];
case "checkbox": case "checkbox":
return ["equals", "isEmpty", "isNotEmpty"]; return ["eq", "isEmpty", "isNotEmpty"];
case "person": case "person":
case "lastEditedBy": case "lastEditedBy":
return ["equals", "notEquals", "isEmpty", "isNotEmpty"]; return ["eq", "neq", "any", "none", "isEmpty", "isNotEmpty"];
case "file": case "file":
return ["isEmpty", "isNotEmpty"]; return ["isEmpty", "isNotEmpty"];
default: default:
return ["equals", "notEquals", "isEmpty", "isNotEmpty"]; return ["eq", "neq", "isEmpty", "isNotEmpty"];
} }
} }
function FilterValueInput({ function FilterValueInput({
filter, condition,
property, property,
onChange, onChange,
t, t,
}: { }: {
filter: ViewFilterConfig; condition: FilterCondition;
property: IBaseProperty | undefined; property: IBaseProperty | undefined;
onChange: (value: string) => void; onChange: (value: string) => void;
t: (key: string) => string; t: (key: string) => string;
@@ -77,7 +85,7 @@ function FilterValueInput({
<TextInput <TextInput
size="xs" size="xs"
placeholder={t("Value")} placeholder={t("Value")}
value={(filter.value as string) ?? ""} value={(condition.value as string) ?? ""}
onChange={(e) => onChange(e.currentTarget.value)} onChange={(e) => onChange(e.currentTarget.value)}
w={100} w={100}
/> />
@@ -94,7 +102,7 @@ function FilterValueInput({
<Select <Select
size="xs" size="xs"
data={choiceOptions} data={choiceOptions}
value={(filter.value as string) ?? null} value={(condition.value as string) ?? null}
onChange={(val) => onChange(val ?? "")} onChange={(val) => onChange(val ?? "")}
w={120} w={120}
placeholder={t("Select")} placeholder={t("Select")}
@@ -108,7 +116,7 @@ function FilterValueInput({
size="xs" size="xs"
type="number" type="number"
placeholder={t("Value")} placeholder={t("Value")}
value={(filter.value as string) ?? ""} value={(condition.value as string) ?? ""}
onChange={(e) => onChange(e.currentTarget.value)} onChange={(e) => onChange(e.currentTarget.value)}
w={100} w={100}
/> />
@@ -123,7 +131,7 @@ function FilterValueInput({
{ value: "true", label: t("True") }, { value: "true", label: t("True") },
{ value: "false", label: t("False") }, { value: "false", label: t("False") },
]} ]}
value={(filter.value as string) ?? null} value={(condition.value as string) ?? null}
onChange={(val) => onChange(val ?? "")} onChange={(val) => onChange(val ?? "")}
w={100} w={100}
/> />
@@ -134,7 +142,7 @@ function FilterValueInput({
<TextInput <TextInput
size="xs" size="xs"
placeholder={t("Value")} placeholder={t("Value")}
value={(filter.value as string) ?? ""} value={(condition.value as string) ?? ""}
onChange={(e) => onChange(e.currentTarget.value)} onChange={(e) => onChange(e.currentTarget.value)}
w={100} w={100}
/> />
@@ -144,16 +152,16 @@ function FilterValueInput({
type ViewFilterConfigProps = { type ViewFilterConfigProps = {
opened: boolean; opened: boolean;
onClose: () => void; onClose: () => void;
filters: ViewFilterConfig[]; conditions: FilterCondition[];
properties: IBaseProperty[]; properties: IBaseProperty[];
onChange: (filters: ViewFilterConfig[]) => void; onChange: (conditions: FilterCondition[]) => void;
children: React.ReactNode; children: React.ReactNode;
}; };
export function ViewFilterConfigPopover({ export function ViewFilterConfigPopover({
opened, opened,
onClose, onClose,
filters, conditions,
properties, properties,
onChange, onChange,
children, children,
@@ -169,18 +177,20 @@ export function ViewFilterConfigPopover({
const firstProperty = properties[0]; const firstProperty = properties[0];
if (!firstProperty) return; if (!firstProperty) return;
const validOperators = getOperatorsForType(firstProperty.type); const validOperators = getOperatorsForType(firstProperty.type);
const defaultOperator = validOperators.includes("contains") ? "contains" : validOperators[0]; const defaultOperator = validOperators.includes("contains")
? ("contains" as FilterOperator)
: validOperators[0];
onChange([ onChange([
...filters, ...conditions,
{ propertyId: firstProperty.id, operator: defaultOperator }, { propertyId: firstProperty.id, op: defaultOperator },
]); ]);
}, [filters, properties, onChange]); }, [conditions, properties, onChange]);
const handleRemove = useCallback( const handleRemove = useCallback(
(index: number) => { (index: number) => {
onChange(filters.filter((_, i) => i !== index)); onChange(conditions.filter((_, i) => i !== index));
}, },
[filters, onChange], [conditions, onChange],
); );
const handlePropertyChange = useCallback( const handlePropertyChange = useCallback(
@@ -188,15 +198,15 @@ export function ViewFilterConfigPopover({
if (!propertyId) return; if (!propertyId) return;
const newProperty = properties.find((p) => p.id === propertyId); const newProperty = properties.find((p) => p.id === propertyId);
onChange( onChange(
filters.map((f, i) => { conditions.map((f, i) => {
if (i !== index) return f; if (i !== index) return f;
if (newProperty) { if (newProperty) {
const validOperators = getOperatorsForType(newProperty.type); const validOperators = getOperatorsForType(newProperty.type);
const currentOperatorValid = validOperators.includes(f.operator); const currentOperatorValid = validOperators.includes(f.op);
return { return {
...f, ...f,
propertyId, propertyId,
operator: currentOperatorValid ? f.operator : validOperators[0], op: currentOperatorValid ? f.op : validOperators[0],
value: currentOperatorValid ? f.value : undefined, value: currentOperatorValid ? f.value : undefined,
}; };
} }
@@ -204,38 +214,38 @@ export function ViewFilterConfigPopover({
}), }),
); );
}, },
[filters, properties, onChange], [conditions, properties, onChange],
); );
const handleOperatorChange = useCallback( const handleOperatorChange = useCallback(
(index: number, operator: string | null) => { (index: number, operator: string | null) => {
if (!operator) return; if (!operator) return;
const op = operator as ViewFilterOperator; const op = operator as FilterOperator;
const needsValue = !NO_VALUE_OPERATORS.includes(op); const needsValue = !NO_VALUE_OPERATORS.includes(op);
onChange( onChange(
filters.map((f, i) => conditions.map((f, i) =>
i === index i === index
? { ? {
...f, ...f,
operator: op, op,
value: needsValue ? f.value : undefined, value: needsValue ? f.value : undefined,
} }
: f, : f,
), ),
); );
}, },
[filters, onChange], [conditions, onChange],
); );
const handleValueChange = useCallback( const handleValueChange = useCallback(
(index: number, value: string) => { (index: number, value: string) => {
onChange( onChange(
filters.map((f, i) => conditions.map((f, i) =>
i === index ? { ...f, value: value || undefined } : f, i === index ? { ...f, value: value || undefined } : f,
), ),
); );
}, },
[filters, onChange], [conditions, onChange],
); );
return ( return (
@@ -255,44 +265,46 @@ export function ViewFilterConfigPopover({
{t("Filter by")} {t("Filter by")}
</Text> </Text>
{filters.length === 0 && ( {conditions.length === 0 && (
<Text size="xs" c="dimmed"> <Text size="xs" c="dimmed">
{t("No filters applied")} {t("No filters applied")}
</Text> </Text>
)} )}
{filters.map((filter, index) => { {conditions.map((condition, index) => {
const needsValue = !NO_VALUE_OPERATORS.includes(filter.operator); const needsValue = !NO_VALUE_OPERATORS.includes(condition.op);
const property = properties.find((p) => p.id === filter.propertyId); const property = properties.find(
(p) => p.id === condition.propertyId,
);
const validOperators = property const validOperators = property
? getOperatorsForType(property.type) ? getOperatorsForType(property.type)
: OPERATORS.map((op) => op.value); : OPERATORS.map((op) => op.value);
const operatorOptions = OPERATORS const operatorOptions = OPERATORS.filter((op) =>
.filter((op) => validOperators.includes(op.value)) validOperators.includes(op.value),
.map((op) => ({ ).map((op) => ({
value: op.value, value: op.value,
label: t(op.labelKey), label: t(op.labelKey),
})); }));
return ( return (
<Group key={index} gap="xs" wrap="nowrap"> <Group key={index} gap="xs" wrap="nowrap">
<Select <Select
size="xs" size="xs"
data={propertyOptions} data={propertyOptions}
value={filter.propertyId} value={condition.propertyId}
onChange={(val) => handlePropertyChange(index, val)} onChange={(val) => handlePropertyChange(index, val)}
style={{ flex: 1 }} style={{ flex: 1 }}
/> />
<Select <Select
size="xs" size="xs"
data={operatorOptions} data={operatorOptions}
value={filter.operator} value={condition.op}
onChange={(val) => handleOperatorChange(index, val)} onChange={(val) => handleOperatorChange(index, val)}
w={130} w={130}
/> />
{needsValue && ( {needsValue && (
<FilterValueInput <FilterValueInput
filter={filter} condition={condition}
property={property} property={property}
onChange={(val) => handleValueChange(index, val)} onChange={(val) => handleValueChange(index, val)}
t={t} t={t}
@@ -0,0 +1,223 @@
import { useEffect } from "react";
import { useAtomValue } from "jotai";
import { useQueryClient, InfiniteData } from "@tanstack/react-query";
import { socketAtom } from "@/features/websocket/atoms/socket-atom";
import {
IBaseProperty,
IBaseRow,
IBaseView,
} from "@/features/base/types/base.types";
import { IPagination } from "@/lib/types";
/*
 * Inbound realtime payloads for a base room, discriminated on
 * `operation`. Row-mutation events carry an optional `requestId` that
 * the handler uses to drop echoes of mutations this client itself sent
 * (see the outbound requestId set further down in this file).
 */

// Another client inserted a row; carries the full new row.
type BaseRowCreated = {
  operation: "base:row:created";
  baseId: string;
  row: IBaseRow;
  requestId?: string | null;
};

// A row's cell values changed; only the touched cells are sent.
type BaseRowUpdated = {
  operation: "base:row:updated";
  baseId: string;
  rowId: string;
  updatedCells: Record<string, unknown>;
  requestId?: string | null;
};

// A row was removed.
type BaseRowDeleted = {
  operation: "base:row:deleted";
  baseId: string;
  rowId: string;
  requestId?: string | null;
};

// A row moved; `position` is its new sort key (a string — presumably a
// fractional index, matching the fractional-indexing import elsewhere
// in this feature — TODO confirm against the server emitter).
type BaseRowReordered = {
  operation: "base:row:reordered";
  baseId: string;
  rowId: string;
  position: string;
  requestId?: string | null;
};

// Property (column) lifecycle events. `property` and `propertyId` are
// both optional; which one is present may vary per operation.
type BasePropertyEvent = {
  operation:
    | "base:property:created"
    | "base:property:updated"
    | "base:property:deleted"
    | "base:property:reordered";
  baseId: string;
  property?: IBaseProperty;
  propertyId?: string;
  requestId?: string | null;
};

// View lifecycle events; like properties, payload fields are optional.
type BaseViewEvent = {
  operation:
    | "base:view:created"
    | "base:view:updated"
    | "base:view:deleted";
  baseId: string;
  view?: IBaseView;
  viewId?: string;
};

// Signals that a schema migration finished rewriting cells; the handler
// treats this as the trigger to refetch row data.
type BaseSchemaBumped = {
  operation: "base:schema:bumped";
  baseId: string;
  schemaVersion: number;
};

// Union of everything the room may deliver. The trailing catch-all
// member keeps unknown / newer operations representable so the handler
// can ignore them instead of failing to narrow.
type BaseInboundEvent =
  | BaseRowCreated
  | BaseRowUpdated
  | BaseRowDeleted
  | BaseRowReordered
  | BasePropertyEvent
  | BaseViewEvent
  | BaseSchemaBumped
  | { operation: string; baseId: string };
/*
 * requestIds of mutations this client has just sent. When the server
 * echoes the mutation back as a `base:row:*` / `base:property:*` event
 * carrying a matching `requestId`, the socket handler drops it — the
 * local mutation already reconciled the cache. The set is capped at
 * OUTBOUND_MAX entries (oldest evicted first) so a long-lived tab
 * cannot grow it without bound.
 */
const outboundRequestIds = new Set<string>();
const OUTBOUND_MAX = 256;

export function markRequestIdOutbound(requestId: string): void {
  outboundRequestIds.add(requestId);
  if (outboundRequestIds.size > OUTBOUND_MAX) {
    // Sets iterate in insertion order, so the first entry is the oldest.
    for (const oldest of outboundRequestIds) {
      outboundRequestIds.delete(oldest);
      break;
    }
  }
}
/*
 * Realtime bridge for a single base. Joins the server's room for
 * `baseId` on mount, leaves on unmount, and reconciles the React Query
 * caches (`["base-rows", baseId, ...]` and `["bases", baseId]`) when
 * events arrive from other clients.
 *
 * Events whose `requestId` matches one registered via
 * `markRequestIdOutbound` are echoes of this client's own mutations and
 * are dropped — the local mutation hooks already updated the cache.
 *
 * @param baseId - The base to subscribe to; `undefined` disables the
 *   subscription entirely (no emit, no listener).
 */
export function useBaseSocket(baseId: string | undefined): void {
  const socket = useAtomValue(socketAtom);
  const queryClient = useQueryClient();

  useEffect(() => {
    if (!socket || !baseId) return;

    socket.emit("message", { operation: "base:subscribe", baseId });

    // Applies `mapItems` to the items of every cached page of every
    // `["base-rows", baseId]` infinite query, leaving absent caches
    // untouched. Shared by the three row-patching event branches below.
    const patchRowPages = (
      mapItems: (items: IBaseRow[]) => IBaseRow[],
    ): void => {
      queryClient.setQueriesData<InfiniteData<IPagination<IBaseRow>>>(
        { queryKey: ["base-rows", baseId] },
        (old) =>
          !old
            ? old
            : {
                ...old,
                pages: old.pages.map((page) => ({
                  ...page,
                  items: mapItems(page.items),
                })),
              },
      );
    };

    const handler = (raw: unknown) => {
      if (!raw || typeof raw !== "object") return;
      const event = raw as BaseInboundEvent;
      if (event.baseId !== baseId) return;

      // Suppress echoes of our own mutations. The `in` check narrows
      // the union to members that declare `requestId` (no `any` cast);
      // the typeof check additionally filters out null/undefined.
      const requestId =
        "requestId" in event && typeof event.requestId === "string"
          ? event.requestId
          : undefined;
      if (requestId && outboundRequestIds.has(requestId)) {
        outboundRequestIds.delete(requestId);
        return;
      }

      switch (event.operation) {
        case "base:row:created": {
          // The correct page/position for the new row isn't known
          // client-side, so refetch rather than splice into the cache.
          queryClient.invalidateQueries({ queryKey: ["base-rows", baseId] });
          break;
        }
        case "base:row:updated": {
          const e = event as BaseRowUpdated;
          patchRowPages((items) =>
            items.map((row) =>
              row.id === e.rowId
                ? { ...row, cells: { ...row.cells, ...e.updatedCells } }
                : row,
            ),
          );
          break;
        }
        case "base:row:deleted": {
          const e = event as BaseRowDeleted;
          patchRowPages((items) =>
            items.filter((row) => row.id !== e.rowId),
          );
          break;
        }
        case "base:row:reordered": {
          const e = event as BaseRowReordered;
          patchRowPages((items) =>
            items.map((row) =>
              row.id === e.rowId ? { ...row, position: e.position } : row,
            ),
          );
          break;
        }
        case "base:property:created":
        case "base:property:updated":
        case "base:property:deleted":
        case "base:property:reordered":
        case "base:view:created":
        case "base:view:updated":
        case "base:view:deleted": {
          // Schema/metadata events touch `properties` / `views` on the
          // base, not the cell data. The row cache only gets invalidated
          // when a `base:schema:bumped` arrives (i.e. cells actually
          // migrated) — otherwise a big-base conversion would trigger a
          // serial refetch of every cached infinite-query page.
          queryClient.invalidateQueries({ queryKey: ["bases", baseId] });
          break;
        }
        case "base:schema:bumped": {
          // Cells were rewritten server-side: refetch both the base
          // metadata and the row pages.
          queryClient.invalidateQueries({ queryKey: ["bases", baseId] });
          queryClient.invalidateQueries({ queryKey: ["base-rows", baseId] });
          break;
        }
        default:
          // Unknown / newer operations are intentionally ignored.
          break;
      }
    };

    socket.on("message", handler);
    return () => {
      socket.off("message", handler);
      socket.emit("message", { operation: "base:unsubscribe", baseId });
    };
  }, [socket, baseId, queryClient]);
}
@@ -61,7 +61,13 @@ export function useUpdatePropertyMutation() {
}, },
); );
if (result.conversionSummary || variables.type) { // Invalidate rows only for the synchronous (inline) path — the
// HTTP response there is the "cells are migrated" signal. When the
// server hands back a `jobId`, cells are still being rewritten; the
// `base:schema:bumped` socket event is the canonical refetch
// trigger in that case, and we'd only churn pages with old data by
// refetching now.
if (variables.type && !result.jobId) {
queryClient.invalidateQueries({ queryClient.invalidateQueries({
queryKey: ["base-rows", variables.baseId], queryKey: ["base-rows", variables.baseId],
}); });
@@ -16,41 +16,61 @@ import {
UpdateRowInput, UpdateRowInput,
DeleteRowInput, DeleteRowInput,
ReorderRowInput, ReorderRowInput,
ViewFilterConfig, FilterNode,
SearchSpec,
ViewSortConfig, ViewSortConfig,
} from "@/features/base/types/base.types"; } from "@/features/base/types/base.types";
import { notifications } from "@mantine/notifications"; import { notifications } from "@mantine/notifications";
import { queryClient } from "@/main"; import { queryClient } from "@/main";
import { useTranslation } from "react-i18next"; import { useTranslation } from "react-i18next";
import { IPagination } from "@/lib/types"; import { IPagination } from "@/lib/types";
import { markRequestIdOutbound } from "@/features/base/hooks/use-base-socket";
type RowCacheContext = { type RowCacheContext = {
snapshots: [readonly unknown[], InfiniteData<IPagination<IBaseRow>> | undefined][]; snapshots: [readonly unknown[], InfiniteData<IPagination<IBaseRow>> | undefined][];
}; };
/*
 * Generate a fresh requestId and pre-register it as outbound so the
 * incoming socket echo is suppressed by `useBaseSocket`.
 *
 * Prefers `crypto.randomUUID()` when the runtime provides it, falling
 * back to a timestamp + random suffix. The global is reached through a
 * narrow structural type on `globalThis` instead of `as any`, so the
 * call stays type-checked and the lookup can't throw a ReferenceError
 * in environments without a `crypto` global.
 */
function newRequestId(): string {
  const webCrypto = (
    globalThis as { crypto?: { randomUUID?: () => string } }
  ).crypto;
  const id =
    typeof webCrypto?.randomUUID === "function"
      ? webCrypto.randomUUID()
      : `${Date.now()}-${Math.random().toString(36).slice(2, 10)}`;
  markRequestIdOutbound(id);
  return id;
}
export function useBaseRowsQuery( export function useBaseRowsQuery(
baseId: string | undefined, baseId: string | undefined,
filters?: ViewFilterConfig[], filter?: FilterNode,
sorts?: ViewSortConfig[], sorts?: ViewSortConfig[],
search?: SearchSpec,
) { ) {
// Normalize empty arrays to undefined so query keys stay stable const activeFilter = filter ?? undefined;
const activeFilters = filters?.length ? filters : undefined;
const activeSorts = sorts?.length ? sorts : undefined; const activeSorts = sorts?.length ? sorts : undefined;
const activeSearch = search?.query ? search : undefined;
return useInfiniteQuery({ return useInfiniteQuery({
queryKey: ["base-rows", baseId, activeFilters, activeSorts], queryKey: ["base-rows", baseId, activeFilter, activeSorts, activeSearch],
queryFn: ({ pageParam }) => queryFn: ({ pageParam }) =>
listRows(baseId!, { listRows(baseId!, {
cursor: pageParam, cursor: pageParam,
limit: 100, limit: 100,
filters: activeFilters, filter: activeFilter,
sorts: activeSorts, sorts: activeSorts,
search: activeSearch,
}), }),
enabled: !!baseId, enabled: !!baseId,
initialPageParam: undefined as string | undefined, initialPageParam: undefined as string | undefined,
getNextPageParam: (lastPage: IPagination<IBaseRow>) => getNextPageParam: (lastPage: IPagination<IBaseRow>) =>
lastPage.meta?.nextCursor ?? undefined, lastPage.meta?.nextCursor ?? undefined,
staleTime: 5 * 60 * 1000, staleTime: 5 * 60 * 1000,
// Cap cached pages so an invalidate after a type-conversion refetches
// a bounded set instead of serially re-requesting every page the user
// has ever scrolled through.
maxPages: 5,
}); });
} }
@@ -64,7 +84,7 @@ export function flattenRows(
export function useCreateRowMutation() { export function useCreateRowMutation() {
const { t } = useTranslation(); const { t } = useTranslation();
return useMutation<IBaseRow, Error, CreateRowInput>({ return useMutation<IBaseRow, Error, CreateRowInput>({
mutationFn: (data) => createRow(data), mutationFn: (data) => createRow({ ...data, requestId: newRequestId() }),
onSuccess: (newRow) => { onSuccess: (newRow) => {
queryClient.setQueriesData<InfiniteData<IPagination<IBaseRow>>>( queryClient.setQueriesData<InfiniteData<IPagination<IBaseRow>>>(
{ queryKey: ["base-rows", newRow.baseId] }, { queryKey: ["base-rows", newRow.baseId] },
@@ -95,7 +115,7 @@ export function useCreateRowMutation() {
export function useUpdateRowMutation() { export function useUpdateRowMutation() {
const { t } = useTranslation(); const { t } = useTranslation();
return useMutation<IBaseRow, Error, UpdateRowInput, RowCacheContext>({ return useMutation<IBaseRow, Error, UpdateRowInput, RowCacheContext>({
mutationFn: (data) => updateRow(data), mutationFn: (data) => updateRow({ ...data, requestId: newRequestId() }),
onMutate: async (variables) => { onMutate: async (variables) => {
await queryClient.cancelQueries({ await queryClient.cancelQueries({
queryKey: ["base-rows", variables.baseId], queryKey: ["base-rows", variables.baseId],
@@ -164,7 +184,7 @@ export function useUpdateRowMutation() {
export function useDeleteRowMutation() { export function useDeleteRowMutation() {
const { t } = useTranslation(); const { t } = useTranslation();
return useMutation<void, Error, DeleteRowInput, RowCacheContext>({ return useMutation<void, Error, DeleteRowInput, RowCacheContext>({
mutationFn: (data) => deleteRow(data), mutationFn: (data) => deleteRow({ ...data, requestId: newRequestId() }),
onMutate: async (variables) => { onMutate: async (variables) => {
await queryClient.cancelQueries({ await queryClient.cancelQueries({
queryKey: ["base-rows", variables.baseId], queryKey: ["base-rows", variables.baseId],
@@ -207,7 +227,7 @@ export function useDeleteRowMutation() {
export function useReorderRowMutation() { export function useReorderRowMutation() {
const { t } = useTranslation(); const { t } = useTranslation();
return useMutation<void, Error, ReorderRowInput, RowCacheContext>({ return useMutation<void, Error, ReorderRowInput, RowCacheContext>({
mutationFn: (data) => reorderRow(data), mutationFn: (data) => reorderRow({ ...data, requestId: newRequestId() }),
onMutate: async (variables) => { onMutate: async (variables) => {
await queryClient.cancelQueries({ await queryClient.cancelQueries({
queryKey: ["base-rows", variables.baseId], queryKey: ["base-rows", variables.baseId],
@@ -18,7 +18,8 @@ import {
UpdateViewInput, UpdateViewInput,
DeleteViewInput, DeleteViewInput,
UpdatePropertyResult, UpdatePropertyResult,
ViewFilterConfig, FilterNode,
SearchSpec,
ViewSortConfig, ViewSortConfig,
} from "@/features/base/types/base.types"; } from "@/features/base/types/base.types";
import { IPagination } from "@/lib/types"; import { IPagination } from "@/lib/types";
@@ -111,8 +112,9 @@ export async function listRows(
viewId?: string; viewId?: string;
cursor?: string; cursor?: string;
limit?: number; limit?: number;
filters?: ViewFilterConfig[]; filter?: FilterNode;
sorts?: ViewSortConfig[]; sorts?: ViewSortConfig[];
search?: SearchSpec;
}, },
): Promise<IPagination<IBaseRow>> { ): Promise<IPagination<IBaseRow>> {
const req = await api.post("/bases/rows/list", { baseId, ...params }); const req = await api.post("/bases/rows/list", { baseId, ...params });
@@ -77,6 +77,11 @@
color: light-dark(var(--mantine-color-gray-5), var(--mantine-color-dark-3)); color: light-dark(var(--mantine-color-gray-5), var(--mantine-color-dark-3));
} }
.headerConvertingSpinner {
flex-shrink: 0;
margin-left: auto;
}
.resizeHandle { .resizeHandle {
position: absolute; position: absolute;
right: 0; right: 0;
@@ -81,6 +81,11 @@ export type IBaseProperty = {
type: BasePropertyType; type: BasePropertyType;
position: string; position: string;
typeOptions: TypeOptions; typeOptions: TypeOptions;
// Set while a background type-conversion job is rewriting cells. The
// live `type` stays on the old kind until the job commits, so cells
// render correctly; the column header shows a "Converting…" badge.
pendingType?: BasePropertyType | null;
pendingTypeOptions?: TypeOptions | null;
isPrimary: boolean; isPrimary: boolean;
workspaceId: string; workspaceId: string;
createdAt: string; createdAt: string;
@@ -104,27 +109,49 @@ export type ViewSortConfig = {
direction: 'asc' | 'desc'; direction: 'asc' | 'desc';
}; };
export type ViewFilterOperator = // Matches the server's engine operator set (core/base/engine/schema.zod.ts).
| 'equals' export type FilterOperator =
| 'notEquals' | 'eq'
| 'neq'
| 'gt'
| 'gte'
| 'lt'
| 'lte'
| 'contains' | 'contains'
| 'notContains' | 'ncontains'
| 'startsWith'
| 'endsWith'
| 'isEmpty' | 'isEmpty'
| 'isNotEmpty' | 'isNotEmpty'
| 'greaterThan'
| 'lessThan'
| 'before' | 'before'
| 'after'; | 'after'
| 'onOrBefore'
| 'onOrAfter'
| 'any'
| 'none'
| 'all';
export type ViewFilterConfig = { export type FilterCondition = {
propertyId: string; propertyId: string;
operator: ViewFilterOperator; op: FilterOperator;
value?: unknown; value?: unknown;
}; };
export type FilterGroup = {
op: 'and' | 'or';
children: Array<FilterCondition | FilterGroup>;
};
export type FilterNode = FilterCondition | FilterGroup;
export type SearchSpec = {
query: string;
mode?: 'trgm' | 'fts';
};
export type ViewConfig = { export type ViewConfig = {
sorts?: ViewSortConfig[]; sorts?: ViewSortConfig[];
filters?: ViewFilterConfig[]; filter?: FilterGroup;
visiblePropertyIds?: string[]; visiblePropertyIds?: string[];
hiddenPropertyIds?: string[]; hiddenPropertyIds?: string[];
propertyWidths?: Record<string, number>; propertyWidths?: Record<string, number>;
@@ -183,6 +210,7 @@ export type CreatePropertyInput = {
name: string; name: string;
type: BasePropertyType; type: BasePropertyType;
typeOptions?: TypeOptions; typeOptions?: TypeOptions;
requestId?: string;
}; };
export type UpdatePropertyInput = { export type UpdatePropertyInput = {
@@ -191,40 +219,47 @@ export type UpdatePropertyInput = {
name?: string; name?: string;
type?: BasePropertyType; type?: BasePropertyType;
typeOptions?: TypeOptions; typeOptions?: TypeOptions;
requestId?: string;
}; };
export type DeletePropertyInput = { export type DeletePropertyInput = {
propertyId: string; propertyId: string;
baseId: string; baseId: string;
requestId?: string;
}; };
export type ReorderPropertyInput = { export type ReorderPropertyInput = {
propertyId: string; propertyId: string;
baseId: string; baseId: string;
position: string; position: string;
requestId?: string;
}; };
export type CreateRowInput = { export type CreateRowInput = {
baseId: string; baseId: string;
cells?: Record<string, unknown>; cells?: Record<string, unknown>;
afterRowId?: string; afterRowId?: string;
requestId?: string;
}; };
export type UpdateRowInput = { export type UpdateRowInput = {
rowId: string; rowId: string;
baseId: string; baseId: string;
cells: Record<string, unknown>; cells: Record<string, unknown>;
requestId?: string;
}; };
export type DeleteRowInput = { export type DeleteRowInput = {
rowId: string; rowId: string;
baseId: string; baseId: string;
requestId?: string;
}; };
export type ReorderRowInput = { export type ReorderRowInput = {
rowId: string; rowId: string;
baseId: string; baseId: string;
position: string; position: string;
requestId?: string;
}; };
export type CreateViewInput = { export type CreateViewInput = {
@@ -247,13 +282,11 @@ export type DeleteViewInput = {
baseId: string; baseId: string;
}; };
export type ConversionSummary = {
converted: number;
cleared: number;
total: number;
};
export type UpdatePropertyResult = { export type UpdatePropertyResult = {
property: IBaseProperty; property: IBaseProperty;
conversionSummary: ConversionSummary | null; // Non-null when the property change kicked off a BullMQ type-conversion
// job (select/multiSelect/person/file → anything, or any → system type).
// Client can listen for `base:schema:bumped` on the base room to know
// when the job finished migrating cells.
jobId: string | null;
}; };
@@ -15,4 +15,25 @@ export enum EventName {
WORKSPACE_CREATED = 'workspace.created', WORKSPACE_CREATED = 'workspace.created',
WORKSPACE_UPDATED = 'workspace.updated', WORKSPACE_UPDATED = 'workspace.updated',
WORKSPACE_DELETED = 'workspace.deleted', WORKSPACE_DELETED = 'workspace.deleted',
BASE_CREATED = 'base.created',
BASE_UPDATED = 'base.updated',
BASE_DELETED = 'base.deleted',
BASE_ROW_CREATED = 'base.row.created',
BASE_ROW_UPDATED = 'base.row.updated',
BASE_ROW_DELETED = 'base.row.deleted',
BASE_ROW_RESTORED = 'base.row.restored',
BASE_ROW_REORDERED = 'base.row.reordered',
BASE_PROPERTY_CREATED = 'base.property.created',
BASE_PROPERTY_UPDATED = 'base.property.updated',
BASE_PROPERTY_DELETED = 'base.property.deleted',
BASE_PROPERTY_REORDERED = 'base.property.reordered',
BASE_VIEW_CREATED = 'base.view.created',
BASE_VIEW_UPDATED = 'base.view.updated',
BASE_VIEW_DELETED = 'base.view.deleted',
BASE_SCHEMA_BUMPED = 'base.schema.bumped',
} }
+25 -2
View File
@@ -1,4 +1,5 @@
import { Module } from '@nestjs/common'; import { Module } from '@nestjs/common';
import { BullModule } from '@nestjs/bullmq';
import { BaseController } from './controllers/base.controller'; import { BaseController } from './controllers/base.controller';
import { BasePropertyController } from './controllers/base-property.controller'; import { BasePropertyController } from './controllers/base-property.controller';
import { BaseRowController } from './controllers/base-row.controller'; import { BaseRowController } from './controllers/base-row.controller';
@@ -7,15 +8,37 @@ import { BaseService } from './services/base.service';
import { BasePropertyService } from './services/base-property.service'; import { BasePropertyService } from './services/base-property.service';
import { BaseRowService } from './services/base-row.service'; import { BaseRowService } from './services/base-row.service';
import { BaseViewService } from './services/base-view.service'; import { BaseViewService } from './services/base-view.service';
import { BaseQueueProcessor } from './processors/base-queue.processor';
import { BaseWsService } from './realtime/base-ws.service';
import { BaseWsConsumers } from './realtime/base-ws-consumers';
import { BasePresenceService } from './realtime/base-presence.service';
import { QueueName } from '../../integrations/queue/constants';
@Module({ @Module({
imports: [BullModule.registerQueue({ name: QueueName.BASE_QUEUE })],
controllers: [ controllers: [
BaseController, BaseController,
BasePropertyController, BasePropertyController,
BaseRowController, BaseRowController,
BaseViewController, BaseViewController,
], ],
providers: [BaseService, BasePropertyService, BaseRowService, BaseViewService], providers: [
exports: [BaseService, BasePropertyService, BaseRowService, BaseViewService], BaseService,
BasePropertyService,
BaseRowService,
BaseViewService,
BaseQueueProcessor,
BasePresenceService,
BaseWsService,
BaseWsConsumers,
],
exports: [
BaseService,
BasePropertyService,
BaseRowService,
BaseViewService,
BaseWsService,
BasePresenceService,
],
}) })
export class BaseModule {} export class BaseModule {}
+122 -23
View File
@@ -33,7 +33,7 @@ export type BasePropertyTypeValue =
export const BASE_PROPERTY_TYPES = Object.values(BasePropertyType); export const BASE_PROPERTY_TYPES = Object.values(BasePropertyType);
export const choiceSchema = z.object({ export const choiceSchema = z.object({
id: z.string().uuid(), id: z.uuid(),
name: z.string().min(1), name: z.string().min(1),
color: z.string(), color: z.string(),
category: z.enum(['todo', 'inProgress', 'complete']).optional(), category: z.enum(['todo', 'inProgress', 'complete']).optional(),
@@ -42,10 +42,10 @@ export const choiceSchema = z.object({
export const selectTypeOptionsSchema = z export const selectTypeOptionsSchema = z
.object({ .object({
choices: z.array(choiceSchema).default([]), choices: z.array(choiceSchema).default([]),
choiceOrder: z.array(z.string().uuid()).default([]), choiceOrder: z.array(z.uuid()).default([]),
disableColors: z.boolean().optional(), disableColors: z.boolean().optional(),
defaultValue: z defaultValue: z
.union([z.string().uuid(), z.array(z.string().uuid())]) .union([z.uuid(), z.array(z.uuid())])
.nullable() .nullable()
.optional(), .optional(),
}) })
@@ -147,21 +147,21 @@ export function parseTypeOptions(
const cellValueSchemaMap: Partial<Record<BasePropertyTypeValue, z.ZodType>> = { const cellValueSchemaMap: Partial<Record<BasePropertyTypeValue, z.ZodType>> = {
[BasePropertyType.TEXT]: z.string(), [BasePropertyType.TEXT]: z.string(),
[BasePropertyType.NUMBER]: z.number(), [BasePropertyType.NUMBER]: z.number(),
[BasePropertyType.SELECT]: z.string().uuid(), [BasePropertyType.SELECT]: z.uuid(),
[BasePropertyType.STATUS]: z.string().uuid(), [BasePropertyType.STATUS]: z.uuid(),
[BasePropertyType.MULTI_SELECT]: z.array(z.string().uuid()), [BasePropertyType.MULTI_SELECT]: z.array(z.uuid()),
[BasePropertyType.DATE]: z.string(), [BasePropertyType.DATE]: z.string(),
[BasePropertyType.PERSON]: z.union([z.string().uuid(), z.array(z.string().uuid())]), [BasePropertyType.PERSON]: z.union([z.uuid(), z.array(z.uuid())]),
[BasePropertyType.FILE]: z.array(z.object({ [BasePropertyType.FILE]: z.array(z.object({
id: z.string().uuid(), id: z.uuid(),
fileName: z.string(), fileName: z.string(),
mimeType: z.string().optional(), mimeType: z.string().optional(),
fileSize: z.number().optional(), fileSize: z.number().optional(),
filePath: z.string().optional(), filePath: z.string().optional(),
})), })),
[BasePropertyType.CHECKBOX]: z.boolean(), [BasePropertyType.CHECKBOX]: z.boolean(),
[BasePropertyType.URL]: z.string().url(), [BasePropertyType.URL]: z.url(),
[BasePropertyType.EMAIL]: z.string().email(), [BasePropertyType.EMAIL]: z.email(),
}; };
export function getCellValueSchema( export function getCellValueSchema(
@@ -181,15 +181,83 @@ export function validateCellValue(
return schema.safeParse(value); return schema.safeParse(value);
} }
/*
* Resolution context for conversions where the source type stores IDs
* (select / multiSelect: choice uuid; person: user uuid; file: attachment
* uuid). Callers must always supply this — the only invoker is the
* `BASE_TYPE_CONVERSION` BullMQ worker, which builds the context per
* chunk of rows (see `tasks/base-type-conversion.task.ts`).
*/
export type CellConversionContext = {
fromTypeOptions?: unknown;
userNames?: Map<string, string>;
attachmentNames?: Map<string, string>;
};
function resolveChoiceName(
typeOptions: unknown,
id: unknown,
): string | undefined {
if (!typeOptions || typeof typeOptions !== 'object') return undefined;
const choices = (typeOptions as any).choices;
if (!Array.isArray(choices)) return undefined;
const match = choices.find((c: any) => c?.id === String(id));
return typeof match?.name === 'string' ? match.name : undefined;
}
export function attemptCellConversion( export function attemptCellConversion(
fromType: BasePropertyTypeValue, fromType: BasePropertyTypeValue,
toType: BasePropertyTypeValue, toType: BasePropertyTypeValue,
value: unknown, value: unknown,
ctx: CellConversionContext,
): { converted: boolean; value: unknown } { ): { converted: boolean; value: unknown } {
if (value === null || value === undefined) { if (value === null || value === undefined) {
return { converted: true, value: null }; return { converted: true, value: null };
} }
// Resolve IDs to display strings before any direct parse. `select → text`
// and `multiSelect → text` would otherwise short-circuit on z.string()
// parsing the UUID itself and return the raw UUID instead of the name.
if (toType === BasePropertyType.TEXT) {
if (
fromType === BasePropertyType.SELECT ||
fromType === BasePropertyType.STATUS
) {
const name = resolveChoiceName(ctx.fromTypeOptions, value);
return { converted: true, value: name ?? '' };
}
if (fromType === BasePropertyType.MULTI_SELECT && Array.isArray(value)) {
const parts = value
.map((v) => resolveChoiceName(ctx.fromTypeOptions, v))
.filter((v): v is string => typeof v === 'string' && v.length > 0);
return { converted: true, value: parts.join(', ') };
}
if (fromType === BasePropertyType.PERSON && ctx.userNames) {
const ids = Array.isArray(value) ? value : [value];
const parts = ids
.map((v) => ctx.userNames!.get(String(v)))
.filter((v): v is string => typeof v === 'string' && v.length > 0);
return { converted: true, value: parts.join(', ') };
}
if (fromType === BasePropertyType.FILE && Array.isArray(value)) {
const parts = value
.map((f: any) => {
if (f && typeof f === 'object') {
if (typeof f.fileName === 'string') return f.fileName;
if (typeof f.id === 'string' && ctx.attachmentNames) {
return ctx.attachmentNames.get(f.id);
}
}
if (typeof f === 'string' && ctx.attachmentNames) {
return ctx.attachmentNames.get(f);
}
return undefined;
})
.filter((v): v is string => typeof v === 'string' && v.length > 0);
return { converted: true, value: parts.join(', ') };
}
}
const targetSchema = cellValueSchemaMap[toType]; const targetSchema = cellValueSchemaMap[toType];
if (!targetSchema) { if (!targetSchema) {
return { converted: false, value: null }; return { converted: false, value: null };
@@ -247,35 +315,66 @@ export function attemptCellConversion(
} }
export const viewSortSchema = z.object({ export const viewSortSchema = z.object({
propertyId: z.string().uuid(), propertyId: z.uuid(),
direction: z.enum(['asc', 'desc']), direction: z.enum(['asc', 'desc']),
}); });
export const viewFilterSchema = z.object({ /*
propertyId: z.string().uuid(), * View-stored filter shape matches the engine's predicate tree (see
operator: z.enum([ * `core/base/engine/schema.zod.ts`). No legacy flat-array / operator-name
'equals', * variants are accepted — stored view configs use `op` (eq / neq / gt /
'notEquals', * lt / contains / ncontains / ...) and nested and/or groups.
*/
const viewFilterConditionSchema = z.object({
propertyId: z.uuid(),
op: z.enum([
'eq',
'neq',
'gt',
'gte',
'lt',
'lte',
'contains', 'contains',
'notContains', 'ncontains',
'startsWith',
'endsWith',
'isEmpty', 'isEmpty',
'isNotEmpty', 'isNotEmpty',
'greaterThan',
'lessThan',
'before', 'before',
'after', 'after',
'onOrBefore',
'onOrAfter',
'any',
'none',
'all',
]), ]),
value: z.unknown().optional(), value: z.unknown().optional(),
}); });
type ViewFilterCondition = z.infer<typeof viewFilterConditionSchema>;
type ViewFilterGroup = {
op: 'and' | 'or';
children: Array<ViewFilterCondition | ViewFilterGroup>;
};
const viewFilterNodeSchema: z.ZodType<ViewFilterCondition | ViewFilterGroup> =
z.lazy(() => z.union([viewFilterConditionSchema, viewFilterGroupSchema]));
const viewFilterGroupSchema: z.ZodType<ViewFilterGroup> = z.lazy(() =>
z.object({
op: z.enum(['and', 'or']),
children: z.array(viewFilterNodeSchema),
}),
);
export const viewConfigSchema = z export const viewConfigSchema = z
.object({ .object({
sorts: z.array(viewSortSchema).optional(), sorts: z.array(viewSortSchema).optional(),
filters: z.array(viewFilterSchema).optional(), filter: viewFilterGroupSchema.optional(),
visiblePropertyIds: z.array(z.string().uuid()).optional(), visiblePropertyIds: z.array(z.uuid()).optional(),
hiddenPropertyIds: z.array(z.string().uuid()).optional(), hiddenPropertyIds: z.array(z.uuid()).optional(),
propertyWidths: z.record(z.string(), z.number().positive()).optional(), propertyWidths: z.record(z.string(), z.number().positive()).optional(),
propertyOrder: z.array(z.string().uuid()).optional(), propertyOrder: z.array(z.uuid()).optional(),
}) })
.passthrough(); .passthrough();
@@ -52,12 +52,16 @@ export class BasePropertyController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
return this.basePropertyService.create(workspace.id, dto); return this.basePropertyService.create(workspace.id, dto, user.id);
} }
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('update') @Post('update')
async update(@Body() dto: UpdatePropertyDto, @AuthUser() user: User) { async update(
@Body() dto: UpdatePropertyDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -68,12 +72,16 @@ export class BasePropertyController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
return this.basePropertyService.update(dto); return this.basePropertyService.update(dto, workspace.id, user.id);
} }
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('delete') @Post('delete')
async delete(@Body() dto: DeletePropertyDto, @AuthUser() user: User) { async delete(
@Body() dto: DeletePropertyDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -84,12 +92,16 @@ export class BasePropertyController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
await this.basePropertyService.delete(dto); await this.basePropertyService.delete(dto, workspace.id, user.id);
} }
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('reorder') @Post('reorder')
async reorder(@Body() dto: ReorderPropertyDto, @AuthUser() user: User) { async reorder(
@Body() dto: ReorderPropertyDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -100,6 +112,6 @@ export class BasePropertyController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
await this.basePropertyService.reorder(dto); await this.basePropertyService.reorder(dto, workspace.id, user.id);
} }
} }
@@ -60,58 +60,10 @@ export class BaseRowController {
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('info') @Post('info')
async getRow(@Body() dto: RowIdDto, @AuthUser() user: User) { async getRow(
const base = await this.baseRepo.findById(dto.baseId); @Body() dto: RowIdDto,
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
return this.baseRowService.getRowInfo(dto.rowId, dto.baseId);
}
@HttpCode(HttpStatus.OK)
@Post('update')
async update(@Body() dto: UpdateRowDto, @AuthUser() user: User) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
return this.baseRowService.update(dto, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('delete')
async delete(@Body() dto: DeleteRowDto, @AuthUser() user: User) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
await this.baseRowService.delete(dto.rowId, dto.baseId);
}
@HttpCode(HttpStatus.OK)
@Post('list')
async list(
@Body() dto: ListRowsDto,
@Body() pagination: PaginationOptions,
@AuthUser() user: User, @AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) { ) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
@@ -123,12 +75,16 @@ export class BaseRowController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
return this.baseRowService.list(dto, pagination); return this.baseRowService.getRowInfo(dto.rowId, dto.baseId, workspace.id);
} }
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('reorder') @Post('update')
async reorder(@Body() dto: ReorderRowDto, @AuthUser() user: User) { async update(
@Body() dto: UpdateRowDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -139,6 +95,67 @@ export class BaseRowController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
await this.baseRowService.reorder(dto); return this.baseRowService.update(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('delete')
async delete(
@Body() dto: DeleteRowDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
await this.baseRowService.delete(dto, workspace.id, user.id);
}
@HttpCode(HttpStatus.OK)
@Post('list')
async list(
@Body() dto: ListRowsDto,
@Body() pagination: PaginationOptions,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Read, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
return this.baseRowService.list(dto, pagination, workspace.id);
}
@HttpCode(HttpStatus.OK)
@Post('reorder')
async reorder(
@Body() dto: ReorderRowDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId);
if (!base) {
throw new NotFoundException('Base not found');
}
const ability = await this.spaceAbility.createForUser(user, base.spaceId);
if (ability.cannot(SpaceCaslAction.Edit, SpaceCaslSubject.Base)) {
throw new ForbiddenException();
}
await this.baseRowService.reorder(dto, workspace.id, user.id);
} }
} }
@@ -54,7 +54,11 @@ export class BaseViewController {
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('update') @Post('update')
async update(@Body() dto: UpdateViewDto, @AuthUser() user: User) { async update(
@Body() dto: UpdateViewDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -65,12 +69,16 @@ export class BaseViewController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
return this.baseViewService.update(dto); return this.baseViewService.update(dto, workspace.id, user.id);
} }
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('delete') @Post('delete')
async delete(@Body() dto: DeleteViewDto, @AuthUser() user: User) { async delete(
@Body() dto: DeleteViewDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -81,12 +89,16 @@ export class BaseViewController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
await this.baseViewService.delete(dto); await this.baseViewService.delete(dto, workspace.id, user.id);
} }
@HttpCode(HttpStatus.OK) @HttpCode(HttpStatus.OK)
@Post('list') @Post('list')
async list(@Body() dto: BaseIdDto, @AuthUser() user: User) { async list(
@Body() dto: BaseIdDto,
@AuthUser() user: User,
@AuthWorkspace() workspace: Workspace,
) {
const base = await this.baseRepo.findById(dto.baseId); const base = await this.baseRepo.findById(dto.baseId);
if (!base) { if (!base) {
throw new NotFoundException('Base not found'); throw new NotFoundException('Base not found');
@@ -97,6 +109,6 @@ export class BaseViewController {
throw new ForbiddenException(); throw new ForbiddenException();
} }
return this.baseViewService.listByBaseId(dto.baseId); return this.baseViewService.listByBaseId(dto.baseId, workspace.id);
} }
} }
@@ -9,6 +9,12 @@ export class CreateRowDto {
cells?: Record<string, unknown>; cells?: Record<string, unknown>;
@IsOptional() @IsOptional()
@IsString() @IsUUID()
afterRowId?: string; afterRowId?: string;
// Echoed back in the socket event so the originating client can skip
// replaying its own write.
@IsOptional()
@IsString()
requestId?: string;
} }
@@ -27,6 +27,10 @@ export class UpdatePropertyDto {
@IsOptional() @IsOptional()
@IsObject() @IsObject()
typeOptions?: Record<string, unknown>; typeOptions?: Record<string, unknown>;
@IsOptional()
@IsString()
requestId?: string;
} }
export class DeletePropertyDto { export class DeletePropertyDto {
@@ -35,6 +39,10 @@ export class DeletePropertyDto {
@IsUUID() @IsUUID()
baseId: string; baseId: string;
@IsOptional()
@IsString()
requestId?: string;
} }
export class ReorderPropertyDto { export class ReorderPropertyDto {
@@ -47,4 +55,8 @@ export class ReorderPropertyDto {
@IsString() @IsString()
@IsNotEmpty() @IsNotEmpty()
position: string; position: string;
@IsOptional()
@IsString()
requestId?: string;
} }
+35 -20
View File
@@ -1,5 +1,16 @@
import { IsNotEmpty, IsObject, IsOptional, IsString, IsUUID, IsArray, ValidateNested } from 'class-validator'; import {
IsIn,
IsNotEmpty,
IsObject,
IsOptional,
IsString,
IsUUID,
IsArray,
ValidateNested,
} from 'class-validator';
import { Type } from 'class-transformer'; import { Type } from 'class-transformer';
// `filter` / `search` shapes are validated by the engine's Zod schemas
// at the service boundary (`core/base/engine/schema.zod.ts`).
export class UpdateRowDto { export class UpdateRowDto {
@IsUUID() @IsUUID()
@@ -10,6 +21,10 @@ export class UpdateRowDto {
@IsObject() @IsObject()
cells: Record<string, unknown>; cells: Record<string, unknown>;
@IsOptional()
@IsString()
requestId?: string;
} }
export class DeleteRowDto { export class DeleteRowDto {
@@ -18,6 +33,10 @@ export class DeleteRowDto {
@IsUUID() @IsUUID()
baseId: string; baseId: string;
@IsOptional()
@IsString()
requestId?: string;
} }
export class RowIdDto { export class RowIdDto {
@@ -28,25 +47,12 @@ export class RowIdDto {
baseId: string; baseId: string;
} }
class FilterDto {
@IsUUID()
propertyId: string;
@IsString()
@IsNotEmpty()
operator: string;
@IsOptional()
value?: unknown;
}
class SortDto { class SortDto {
@IsUUID() @IsUUID()
propertyId: string; propertyId: string;
@IsString() @IsIn(['asc', 'desc'])
@IsNotEmpty() direction: 'asc' | 'desc';
direction: string;
} }
export class ListRowsDto { export class ListRowsDto {
@@ -57,17 +63,22 @@ export class ListRowsDto {
@IsUUID() @IsUUID()
viewId?: string; viewId?: string;
// Compound filter tree. Shape validated by the engine's Zod schema at
// the service boundary.
@IsOptional() @IsOptional()
@IsArray() @IsObject()
@ValidateNested({ each: true }) filter?: unknown;
@Type(() => FilterDto)
filters?: FilterDto[];
@IsOptional() @IsOptional()
@IsArray() @IsArray()
@ValidateNested({ each: true }) @ValidateNested({ each: true })
@Type(() => SortDto) @Type(() => SortDto)
sorts?: SortDto[]; sorts?: SortDto[];
// `{ query, mode? }` — Zod-validated at the service boundary.
@IsOptional()
@IsObject()
search?: unknown;
} }
export class ReorderRowDto { export class ReorderRowDto {
@@ -80,4 +91,8 @@ export class ReorderRowDto {
@IsString() @IsString()
@IsNotEmpty() @IsNotEmpty()
position: string; position: string;
@IsOptional()
@IsString()
requestId?: string;
} }
+111
View File
@@ -0,0 +1,111 @@
import { BadRequestException } from '@nestjs/common';
import { SortBuild, TailKey } from './sort';
type ValueType = 'numeric' | 'date' | 'bool' | 'text';
// Hard cap on decoded cursor size so a tampered cursor can't force a large
// JSON parse. Real cursors are <1KB (a handful of field values).
const MAX_CURSOR_DECODED_BYTES = 4096;
/*
* Null-safe cursor encoder. The previous encoder used a literal string
* sentinel `__null__` for NULLs, which could collide with real cell
* values. This encoder never sees NULL because sort expressions are
* sentinel-wrapped (see sort.ts). It also represents ±Infinity
* explicitly so JSON round-tripping is lossless.
*/
export function makeCursor(sorts: SortBuild[], tailKeys: TailKey[]) {
const types = new Map<string, ValueType>();
for (const s of sorts) types.set(s.key, s.valueType);
for (const k of tailKeys) types.set(k, 'text');
return {
encodeCursor(values: Array<[string, unknown]>): string {
const payload: Record<string, string> = {};
for (const [k, v] of values) {
payload[k] = encodeValue(v, types.get(k) ?? 'text');
}
return Buffer.from(JSON.stringify(payload), 'utf8').toString('base64url');
},
decodeCursor(
cursor: string,
fieldNames: string[],
): Record<string, string> {
let parsed: Record<string, string>;
try {
parsed = JSON.parse(
Buffer.from(cursor, 'base64url').toString('utf8'),
);
} catch {
throw new BadRequestException('Invalid cursor');
}
if (typeof parsed !== 'object' || parsed === null) {
throw new BadRequestException('Invalid cursor payload');
}
const out: Record<string, string> = {};
for (const name of fieldNames) {
if (!(name in parsed)) {
throw new BadRequestException(`Cursor missing field: ${name}`);
}
out[name] = parsed[name];
}
return out;
},
parseCursor(decoded: Record<string, string>): Record<string, unknown> {
const out: Record<string, unknown> = {};
for (const [k, raw] of Object.entries(decoded)) {
out[k] = decodeValue(raw, types.get(k) ?? 'text');
}
return out;
},
};
}
/**
 * Normalize one cell value into its cursor-payload string form.
 *
 * Conventions (must stay in sync with decodeValue):
 *  - '' encodes NULL/absent (and numeric NaN),
 *  - 'inf' / '-inf' encode the ±Infinity sort sentinels,
 *  - booleans become '1' / '0',
 *  - everything else is plain String() output (Dates as ISO strings).
 */
function encodeValue(value: unknown, type: ValueType): string {
  switch (type) {
    case 'numeric': {
      if (value == null) return '';
      const text = String(value);
      const num = typeof value === 'number' ? value : parseFloat(text);
      if (num === Number.POSITIVE_INFINITY || text === 'Infinity') return 'inf';
      if (num === Number.NEGATIVE_INFINITY || text === '-Infinity') return '-inf';
      return Number.isNaN(num) ? '' : String(num);
    }
    case 'date': {
      if (value == null) return '';
      if (value instanceof Date) return value.toISOString();
      const text = String(value);
      if (text === 'infinity') return 'inf';
      if (text === '-infinity') return '-inf';
      return text;
    }
    case 'bool':
      return value ? '1' : '0';
    default:
      return value == null ? '' : String(value);
  }
}
/*
 * Inverse of encodeValue: turn one wire-form string back into the native
 * value used by the keyset comparison. '' maps back to null for numeric
 * and date; 'inf'/'-inf' map back to the respective infinities.
 */
function decodeValue(raw: string, type: ValueType): unknown {
  switch (type) {
    case 'numeric':
      if (raw === '') return null;
      if (raw === 'inf') return Number.POSITIVE_INFINITY;
      if (raw === '-inf') return Number.NEGATIVE_INFINITY;
      return parseFloat(raw);
    case 'date':
      if (raw === '') return null;
      // pg accepts 'infinity'/'-infinity' as timestamptz literals.
      if (raw === 'inf') return 'infinity';
      if (raw === '-inf') return '-infinity';
      return raw;
    case 'bool':
      return raw === '1';
    default:
      // text values pass through untouched.
      return raw;
  }
}
@@ -0,0 +1,86 @@
import { SelectQueryBuilder } from 'kysely';
import { DB } from '@docmost/db/types/db';
import { BaseRow } from '@docmost/db/types/entity.types';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import {
CursorPaginationResult,
executeWithCursorPagination,
} from '@docmost/db/pagination/cursor-pagination';
import { FilterNode, SearchSpec, SortSpec } from './schema.zod';
import { buildWhere, PropertySchema } from './predicate';
import { buildSorts, CURSOR_TAIL_KEYS, SortBuild } from './sort';
import { buildSearch } from './search';
import { makeCursor } from './cursor';
// Options for runListQuery. `schema` must contain every property the
// filter/sorts reference; unknown propertyIds are silently dropped
// (filters become FALSE, sorts are skipped) rather than raising.
export type EngineListOpts = {
  filter?: FilterNode; // validated filter tree (see schema.zod)
  sorts?: SortSpec[]; // user sort order; tail keys are appended internally
  search?: SearchSpec; // full-text / trigram search input
  schema: PropertySchema;
  pagination: PaginationOptions;
};
/*
* Top-level orchestrator. Callers (repos, services) provide a base
* Kysely query already scoped to the target base + workspace + alive
* rows; this adds search/filter/sort clauses and runs cursor pagination.
*/
export async function runListQuery(
  base: SelectQueryBuilder<DB, 'baseRows', any>,
  opts: EngineListOpts,
): Promise<CursorPaginationResult<BaseRow>> {
  let qb = base;
  if (opts.search) {
    // Copy into a local so TS narrowing survives into the closure.
    const spec = opts.search;
    qb = qb.where((eb) => buildSearch(eb, spec));
  }
  if (opts.filter) {
    const filter = opts.filter;
    qb = qb.where((eb) => buildWhere(eb, filter, opts.schema));
  }
  const sortBuilds: SortBuild[] =
    opts.sorts && opts.sorts.length > 0
      ? buildSorts(opts.sorts, opts.schema)
      : [];
  // Select each sort expression under its alias (s0, s1, …) so the paginator
  // can read the sort values off the returned rows when encoding cursors.
  for (const sb of sortBuilds) {
    qb = qb.select(sb.expression.as(sb.key)) as SelectQueryBuilder<
      DB,
      'baseRows',
      any
    >;
  }
  const cursor = makeCursor(sortBuilds, CURSOR_TAIL_KEYS);
  // User sorts first, then position + id as a deterministic tiebreaker so
  // the overall ordering is total and keyset pagination is stable.
  const fields = [
    ...sortBuilds.map((sb) => ({
      expression: sb.expression,
      direction: sb.direction,
      key: sb.key,
    })),
    {
      expression: 'position' as const,
      direction: 'asc' as const,
      key: 'position' as const,
    },
    {
      expression: 'id' as const,
      direction: 'asc' as const,
      key: 'id' as const,
    },
  ];
  // Casts bridge the generic pagination helper's signature; the custom
  // encode/decode/parse hooks keep cursor handling consistent with the
  // sentinel-wrapped sort expressions.
  return executeWithCursorPagination(qb as any, {
    perPage: opts.pagination.limit,
    cursor: opts.pagination.cursor,
    beforeCursor: opts.pagination.beforeCursor,
    fields: fields as any,
    encodeCursor: cursor.encodeCursor as any,
    decodeCursor: cursor.decodeCursor as any,
    parseCursor: cursor.parseCursor as any,
  }) as unknown as Promise<CursorPaginationResult<BaseRow>>;
}
@@ -0,0 +1,32 @@
import { sql, RawBuilder } from 'kysely';
/*
* Parameterised extractors wrapping the SQL helper functions installed
* by the bases-hardening migration. PropertyId always binds as a
* parameter — never string-interpolated. These replace every
* `sql.raw('cells->>...')` site in the old repo.
*/
// Extract a cell as text. The actual cast/lookup semantics live in the
// base_cell_* SQL functions installed by the migration (not visible here);
// propertyId binds as a parameter and is cast to uuid server-side.
export function textCell(propertyId: string): RawBuilder<string> {
  return sql<string>`base_cell_text(cells, ${propertyId}::uuid)`;
}
// Extract a cell as numeric.
export function numericCell(propertyId: string): RawBuilder<number> {
  return sql<number>`base_cell_numeric(cells, ${propertyId}::uuid)`;
}
// Extract a cell as timestamptz.
export function dateCell(propertyId: string): RawBuilder<Date> {
  return sql<Date>`base_cell_timestamptz(cells, ${propertyId}::uuid)`;
}
// Extract a cell as boolean.
export function boolCell(propertyId: string): RawBuilder<boolean> {
  return sql<boolean>`base_cell_bool(cells, ${propertyId}::uuid)`;
}
// Extract a cell as a jsonb array (multi-select / person / file ids).
export function arrayCell(propertyId: string): RawBuilder<unknown> {
  return sql<unknown>`base_cell_array(cells, ${propertyId}::uuid)`;
}
/*
 * Escape user input for use inside an ILIKE pattern: prefix each LIKE
 * metacharacter (% and _) and the escape character itself (\, Postgres's
 * default ESCAPE) with a backslash so wildcards cannot be injected.
 */
export function escapeIlike(value: string): string {
  return value.replace(/([%_\\])/g, '\\$1');
}
+44
View File
@@ -0,0 +1,44 @@
export {
MAX_FILTER_DEPTH,
MAX_FILTER_NODES,
MAX_SORTS,
conditionSchema,
filterGroupSchema,
filterNodeSchema,
listQuerySchema,
operatorSchema,
searchSchema,
sortSpecSchema,
sortsSchema,
validateFilterTree,
} from './schema.zod';
export type {
Condition,
FilterGroup,
FilterNode,
ListQuery,
Operator,
SearchSpec,
SortSpec,
} from './schema.zod';
export {
PropertyKind,
SYSTEM_COLUMN,
isSystemType,
propertyKind,
} from './kinds';
export type { PropertyKindValue } from './kinds';
export { buildWhere } from './predicate';
export type { PropertySchema } from './predicate';
export { buildSorts, CURSOR_TAIL_KEYS } from './sort';
export type { SortBuild, TailKey } from './sort';
export { makeCursor } from './cursor';
export { buildSearch } from './search';
export { runListQuery } from './engine';
export type { EngineListOpts } from './engine';
+57
View File
@@ -0,0 +1,57 @@
import { BasePropertyType } from '../base.schemas';
// Storage/query "kind" buckets: several property types share one storage
// representation and therefore one predicate/sort strategy (see predicate.ts
// and sort.ts). SYS_USER covers the lastEditedBy system column.
export const PropertyKind = {
  TEXT: 'text',
  NUMERIC: 'numeric',
  DATE: 'date',
  BOOL: 'bool',
  SELECT: 'select',
  MULTI: 'multi',
  PERSON: 'person',
  FILE: 'file',
  SYS_USER: 'sys_user',
} as const;
export type PropertyKindValue = (typeof PropertyKind)[keyof typeof PropertyKind];
/*
 * Map a BasePropertyType to its storage/query kind; returns null for
 * unrecognised types (callers treat that as "skip this property").
 */
export function propertyKind(type: string): PropertyKindValue | null {
  // Route table: each entry lists the property types sharing one kind.
  const routes: Array<[string[], PropertyKindValue]> = [
    [
      [BasePropertyType.TEXT, BasePropertyType.URL, BasePropertyType.EMAIL],
      PropertyKind.TEXT,
    ],
    [[BasePropertyType.NUMBER], PropertyKind.NUMERIC],
    [
      [
        BasePropertyType.DATE,
        BasePropertyType.CREATED_AT,
        BasePropertyType.LAST_EDITED_AT,
      ],
      PropertyKind.DATE,
    ],
    [[BasePropertyType.CHECKBOX], PropertyKind.BOOL],
    [[BasePropertyType.SELECT, BasePropertyType.STATUS], PropertyKind.SELECT],
    [[BasePropertyType.MULTI_SELECT], PropertyKind.MULTI],
    [[BasePropertyType.PERSON], PropertyKind.PERSON],
    [[BasePropertyType.FILE], PropertyKind.FILE],
    [[BasePropertyType.LAST_EDITED_BY], PropertyKind.SYS_USER],
  ];
  for (const [memberTypes, kind] of routes) {
    if (memberTypes.includes(type)) {
      return kind;
    }
  }
  return null;
}
// System property type → camelCase column name on `base_rows`.
// Kysely camel-case plugin maps to snake_case in SQL.
// These property types are stored as real columns, not in the `cells`
// jsonb blob, so predicate/sort builders must branch on this map first.
export const SYSTEM_COLUMN: Record<string, 'createdAt' | 'updatedAt' | 'lastUpdatedById'> = {
  [BasePropertyType.CREATED_AT]: 'createdAt',
  [BasePropertyType.LAST_EDITED_AT]: 'updatedAt',
  [BasePropertyType.LAST_EDITED_BY]: 'lastUpdatedById',
};
// True when the property type is backed by a real base_rows column.
// Uses an own-property check: `type in SYSTEM_COLUMN` would also match
// inherited Object.prototype keys (e.g. 'toString', 'constructor'),
// wrongly classifying such strings as system types.
export function isSystemType(type: string): boolean {
  return Object.prototype.hasOwnProperty.call(SYSTEM_COLUMN, type);
}
@@ -0,0 +1,404 @@
import { Expression, ExpressionBuilder, sql, SqlBool } from 'kysely';
import { DB } from '@docmost/db/types/db';
import { BaseProperty } from '@docmost/db/types/entity.types';
import { Condition, FilterNode } from './schema.zod';
import { PropertyKind, propertyKind, SYSTEM_COLUMN } from './kinds';
import {
arrayCell,
boolCell,
dateCell,
escapeIlike,
numericCell,
textCell,
} from './extractors';
// Minimal property metadata the predicate builder needs, keyed by
// property id (only id/type/typeOptions are read here).
export type PropertySchema = Map<
  string,
  Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>
>;
type Eb = ExpressionBuilder<DB, 'baseRows'>;
// Constant boolean SQL fragments: TRUE for vacuous matches, FALSE for
// conditions that can never match (unknown property, bad value, …).
const TRUE = sql<SqlBool>`TRUE`;
const FALSE = sql<SqlBool>`FALSE`;
/*
 * Recursively compile a validated filter tree into one boolean SQL
 * expression. Group nodes combine children with AND/OR; an empty group is
 * vacuously TRUE so it never filters rows out.
 */
export function buildWhere(
  eb: Eb,
  node: FilterNode,
  schema: PropertySchema,
): Expression<SqlBool> {
  // Leaf node → a single property predicate.
  if (!('children' in node)) {
    return buildCondition(eb, node, schema);
  }
  if (node.children.length === 0) {
    return TRUE;
  }
  const parts = node.children.map((child) => buildWhere(eb, child, schema));
  return node.op === 'and' ? eb.and(parts) : eb.or(parts);
}
function buildCondition(
eb: Eb,
cond: Condition,
schema: PropertySchema,
): Expression<SqlBool> {
const prop = schema.get(cond.propertyId);
if (!prop) return FALSE;
const sysCol = SYSTEM_COLUMN[prop.type];
if (sysCol) return systemCondition(eb, sysCol, prop.type, cond);
const kind = propertyKind(prop.type);
if (!kind) return FALSE;
switch (kind) {
case PropertyKind.TEXT:
return textCondition(eb, cond);
case PropertyKind.NUMERIC:
return numericCondition(eb, cond);
case PropertyKind.DATE:
return dateCondition(eb, cond);
case PropertyKind.BOOL:
return boolCondition(eb, cond);
case PropertyKind.SELECT:
return selectCondition(eb, cond);
case PropertyKind.MULTI:
return multiCondition(eb, cond);
case PropertyKind.PERSON:
return personCondition(eb, cond, prop);
case PropertyKind.FILE:
return arrayOfIdsCondition(eb, cond);
default:
return FALSE;
}
}
// --- per-kind handlers ------------------------------------------------
// Text predicates. Note the SQL three-valued-logic handling: negative ops
// (neq/ncontains) explicitly OR in `IS NULL`, because a bare `!=`/`NOT
// ILIKE` eliminates NULL rows. Missing comparison values compile to FALSE.
function textCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      // Both SQL NULL (no cell) and empty string count as empty.
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'contains':
      // Pattern metacharacters in user input are escaped so they match
      // literally.
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `%${escapeIlike(String(val))}%`);
    case 'ncontains':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, 'not ilike', `%${escapeIlike(String(val))}%`),
          ]);
    case 'startsWith':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `${escapeIlike(String(val))}%`);
    case 'endsWith':
      return val == null
        ? FALSE
        : eb(expr as any, 'ilike', `%${escapeIlike(String(val))}`);
    default:
      // Operator not applicable to text (gt, any, …): match nothing.
      return FALSE;
  }
}
/*
 * Numeric predicates. Null or non-numeric comparison values compile to
 * FALSE; `neq` ORs in IS NULL so rows without the cell still match.
 */
function numericCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = numericCell(cond.propertyId);
  // Emptiness checks don't need a comparison value.
  if (cond.op === 'isEmpty') {
    return eb(expr as any, 'is', null);
  }
  if (cond.op === 'isNotEmpty') {
    return eb(expr as any, 'is not', null);
  }
  // NaN doubles as the "missing value" sentinel (Number(null) would be 0).
  const parsed = cond.value == null ? Number.NaN : Number(cond.value);
  if (Number.isNaN(parsed)) {
    return FALSE;
  }
  switch (cond.op) {
    case 'eq':
      return eb(expr as any, '=', parsed);
    case 'neq':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '!=', parsed),
      ]);
    case 'gt':
      return eb(expr as any, '>', parsed);
    case 'gte':
      return eb(expr as any, '>=', parsed);
    case 'lt':
      return eb(expr as any, '<', parsed);
    case 'lte':
      return eb(expr as any, '<=', parsed);
    default:
      return FALSE;
  }
}
// Date predicates. The comparison value is passed through as a string and
// compared against the timestamptz extractor — Postgres parses the literal
// server-side. Null/empty values compile to FALSE; `neq` ORs in IS NULL.
function dateCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = dateCell(cond.propertyId);
  const raw = cond.value;
  const bad = raw == null || raw === '';
  switch (cond.op) {
    case 'isEmpty':
      return eb(expr as any, 'is', null);
    case 'isNotEmpty':
      return eb(expr as any, 'is not', null);
    case 'eq':
      return bad ? FALSE : eb(expr as any, '=', String(raw));
    case 'neq':
      return bad
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(raw)),
          ]);
    case 'before':
      return bad ? FALSE : eb(expr as any, '<', String(raw));
    case 'after':
      return bad ? FALSE : eb(expr as any, '>', String(raw));
    case 'onOrBefore':
      return bad ? FALSE : eb(expr as any, '<=', String(raw));
    case 'onOrAfter':
      return bad ? FALSE : eb(expr as any, '>=', String(raw));
    default:
      return FALSE;
  }
}
/*
 * Checkbox predicates. Only emptiness and (in)equality apply; any other
 * operator, or a missing comparison value, compiles to FALSE.
 */
function boolCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = boolCell(cond.propertyId);
  if (cond.op === 'isEmpty') {
    return eb(expr as any, 'is', null);
  }
  if (cond.op === 'isNotEmpty') {
    return eb(expr as any, 'is not', null);
  }
  if ((cond.op !== 'eq' && cond.op !== 'neq') || cond.value == null) {
    return FALSE;
  }
  const target = Boolean(cond.value);
  if (cond.op === 'eq') {
    return eb(expr as any, '=', target);
  }
  // neq: rows with no cell (SQL NULL) also count as "not equal".
  return eb.or([
    eb(expr as any, 'is', null),
    eb(expr as any, '!=', target),
  ]);
}
// Select/status predicates over a single option-UUID stored as text.
// `any`/`none` take a list of option ids; `none` with an empty list is
// vacuously TRUE and also matches rows with no cell (IS NULL branch).
function selectCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  // Cell stores a single option UUID as string. Use text extractor.
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return eb(expr as any, 'in', arr);
    }
    case 'none': {
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      // NOT IN never matches NULL, so include the IS NULL branch explicitly.
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, 'not in', arr),
      ]);
    }
    default:
      return FALSE;
  }
}
// Multi-select cells store an array of option UUIDs — identical semantics
// to other id-array cells, so delegate.
function multiCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  return arrayOfIdsCondition(eb, cond);
}
// Person predicates. Storage shape depends on the property's
// `allowMultiple` option: array of uuids (delegated to array semantics)
// or a single uuid string handled inline below.
// NOTE(review): single-person mode has no 'none' case (unlike select) —
// confirm that is intentional and not an omission.
function personCondition(
  eb: Eb,
  cond: Condition,
  prop: Pick<BaseProperty, 'id' | 'type' | 'typeOptions'>,
): Expression<SqlBool> {
  // Person cells may be stored as a single uuid or an array of uuids depending
  // on the property's `allowMultiple` option. Normalise to array semantics via
  // `base_cell_array` when it's stored as an array, else text.
  const allowMultiple = !!(prop.typeOptions as any)?.allowMultiple;
  if (allowMultiple) return arrayOfIdsCondition(eb, cond);
  const expr = textCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      return eb.or([
        eb(expr as any, 'is', null),
        eb(expr as any, '=', ''),
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        eb(expr as any, '!=', ''),
      ]);
    case 'eq':
      return val == null ? FALSE : eb(expr as any, '=', String(val));
    case 'neq':
      return val == null
        ? FALSE
        : eb.or([
            eb(expr as any, 'is', null),
            eb(expr as any, '!=', String(val)),
          ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return eb(expr as any, 'in', arr);
    }
    default:
      return FALSE;
  }
}
// Predicates over jsonb arrays of id strings (multi-select, person-multi,
// file). Uses jsonb operators: `?|` (any key present), `@>` (containment
// for `all`), `jsonb_array_length` for emptiness.
// NOTE(review): `?|` expects a text[] right-hand side — assumes the driver
// binds the JS string array as text[]; confirm against the pg driver in use.
function arrayOfIdsCondition(eb: Eb, cond: Condition): Expression<SqlBool> {
  const expr = arrayCell(cond.propertyId);
  const val = cond.value;
  switch (cond.op) {
    case 'isEmpty':
      // Missing cell (SQL NULL) and an explicit empty array both count.
      return eb.or([
        eb(expr as any, 'is', null),
        sql<SqlBool>`jsonb_array_length(${expr}) = 0`,
      ]);
    case 'isNotEmpty':
      return eb.and([
        eb(expr as any, 'is not', null),
        sql<SqlBool>`jsonb_array_length(${expr}) > 0`,
      ]);
    case 'any': {
      const arr = asStringArray(val);
      if (arr.length === 0) return FALSE;
      return sql<SqlBool>`${expr} ?| ${arr}`;
    }
    case 'all': {
      const arr = asStringArray(val);
      // 'all of nothing' is vacuously true.
      if (arr.length === 0) return TRUE;
      // `::text::jsonb` because postgres.js auto-detects JSON-shaped strings
      // as jsonb and re-encodes them, producing a jsonb *string* instead of
      // an array. Without the text hop, the containment check never matches.
      return sql<SqlBool>`${expr} @> ${JSON.stringify(arr)}::text::jsonb`;
    }
    case 'none': {
      const arr = asStringArray(val);
      if (arr.length === 0) return TRUE;
      // NOT (?|) eliminates NULL rows, so include the IS NULL branch.
      return eb.or([
        eb(expr as any, 'is', null),
        sql<SqlBool>`NOT (${expr} ?| ${arr})`,
      ]);
    }
    default:
      return FALSE;
  }
}
// Predicates over real base_rows columns (createdAt / updatedAt /
// lastUpdatedById). `propertyType` is retained for call-site compatibility
// but the branch is keyed off the mapped column name: the previous
// comparison against the hard-coded string literal 'lastEditedBy' could
// silently drift from SYSTEM_COLUMN / BasePropertyType and route uuid
// filters into the timestamp branch.
function systemCondition(
  eb: Eb,
  column: 'createdAt' | 'updatedAt' | 'lastUpdatedById',
  propertyType: string,
  cond: Condition,
): Expression<SqlBool> {
  const ref = eb.ref(column);
  const val = cond.value;
  // lastUpdatedById — nullable uuid column; behaves like select
  // (uuid equality, in/not in, isEmpty).
  if (column === 'lastUpdatedById') {
    switch (cond.op) {
      case 'isEmpty':
        return eb(ref, 'is', null);
      case 'isNotEmpty':
        return eb(ref, 'is not', null);
      case 'eq':
        return val == null ? FALSE : eb(ref, '=', String(val));
      case 'neq':
        return val == null
          ? FALSE
          : eb.or([eb(ref, 'is', null), eb(ref, '!=', String(val))]);
      case 'any': {
        const arr = asStringArray(val);
        if (arr.length === 0) return FALSE;
        return eb(ref, 'in', arr);
      }
      case 'none': {
        const arr = asStringArray(val);
        if (arr.length === 0) return TRUE;
        return eb.or([eb(ref, 'is', null), eb(ref, 'not in', arr)]);
      }
      default:
        return FALSE;
    }
  }
  // createdAt / updatedAt — timestamptz columns (NOT NULL), so isEmpty can
  // never match and isNotEmpty always does.
  const bad = val == null || val === '';
  switch (cond.op) {
    case 'isEmpty':
      return FALSE;
    case 'isNotEmpty':
      return TRUE;
    case 'eq':
      return bad ? FALSE : eb(ref, '=', String(val));
    case 'neq':
      return bad ? FALSE : eb(ref, '!=', String(val));
    case 'before':
      return bad ? FALSE : eb(ref, '<', String(val));
    case 'after':
      return bad ? FALSE : eb(ref, '>', String(val));
    case 'onOrBefore':
      return bad ? FALSE : eb(ref, '<=', String(val));
    case 'onOrAfter':
      return bad ? FALSE : eb(ref, '>=', String(val));
    default:
      return FALSE;
  }
}
// --- utilities --------------------------------------------------------
/*
 * Coerce an unknown filter value into a string array: null/undefined →
 * empty, arrays keep non-null members stringified, scalars become a
 * one-element array.
 */
function asStringArray(val: unknown): string[] {
  if (val == null) {
    return [];
  }
  if (!Array.isArray(val)) {
    return [String(val)];
  }
  const out: string[] = [];
  for (const item of val) {
    if (item != null) {
      out.push(String(item));
    }
  }
  return out;
}
export { TRUE as TRUE_EXPR, FALSE as FALSE_EXPR };
@@ -0,0 +1,100 @@
import { z } from 'zod';
// Hard limits enforced by validateFilterTree / sortsSchema before any
// filter reaches SQL compilation.
export const MAX_FILTER_DEPTH = 5;
export const MAX_FILTER_NODES = 50;
export const MAX_SORTS = 5;
// NOTE(review): `z.uuid()` is the Zod v4 top-level API; on Zod v3 this must
// be `z.string().uuid()` — confirm the installed zod version.
const uuid = z.uuid();
export const operatorSchema = z.enum([
  'eq',
  'neq',
  'gt',
  'gte',
  'lt',
  'lte',
  'contains',
  'ncontains',
  'startsWith',
  'endsWith',
  'isEmpty',
  'isNotEmpty',
  'before',
  'after',
  'onOrBefore',
  'onOrAfter',
  'any',
  'none',
  'all',
]);
export type Operator = z.infer<typeof operatorSchema>;
// Leaf condition: which property, which operator, optional comparison value
// (validated per-kind later in the predicate builder, so `unknown` here).
export const conditionSchema = z.object({
  propertyId: uuid,
  op: operatorSchema,
  value: z.unknown().optional(),
});
export type Condition = z.infer<typeof conditionSchema>;
export type FilterNode = Condition | FilterGroup;
export type FilterGroup = {
  op: 'and' | 'or';
  children: FilterNode[];
};
// Recursive Zod schema for grouped filter trees.
// Union order is safe: a group's op ('and'/'or') is not in operatorSchema,
// so a group object can never be mistaken for a condition.
export const filterNodeSchema: z.ZodType<FilterNode> = z.lazy(() =>
  z.union([conditionSchema, filterGroupSchema]),
);
export const filterGroupSchema: z.ZodType<FilterGroup> = z.lazy(() =>
  z.object({
    op: z.enum(['and', 'or']),
    children: z.array(filterNodeSchema),
  }),
);
// Count nodes + max depth to prevent pathological trees from reaching SQL.
/*
 * Guard against pathological filter trees before SQL compilation: throws
 * when the tree is deeper than MAX_FILTER_DEPTH or holds more than
 * MAX_FILTER_NODES nodes. Iterative pre-order traversal (children pushed
 * in reverse so visit order matches left-to-right recursion).
 */
export function validateFilterTree(node: FilterNode): void {
  let visited = 0;
  const stack: Array<{ item: FilterNode; depth: number }> = [
    { item: node, depth: 0 },
  ];
  while (stack.length > 0) {
    const { item, depth } = stack.pop()!;
    if (depth > MAX_FILTER_DEPTH) {
      throw new Error(`Filter tree exceeds max depth ${MAX_FILTER_DEPTH}`);
    }
    visited += 1;
    if (visited > MAX_FILTER_NODES) {
      throw new Error(`Filter tree exceeds max node count ${MAX_FILTER_NODES}`);
    }
    if ('children' in item) {
      for (let i = item.children.length - 1; i >= 0; i--) {
        stack.push({ item: item.children[i], depth: depth + 1 });
      }
    }
  }
}
// A single user sort: property + direction. Tail keys (position, id) are
// appended by the engine, never supplied by clients.
export const sortSpecSchema = z.object({
  propertyId: uuid,
  direction: z.enum(['asc', 'desc']),
});
export type SortSpec = z.infer<typeof sortSpecSchema>;
export const sortsSchema = z.array(sortSpecSchema).max(MAX_SORTS);
// Search input: bounded query length; mode defaults to trigram ILIKE.
export const searchSchema = z.object({
  query: z.string().min(1).max(500),
  mode: z.enum(['trgm', 'fts']).default('trgm'),
});
export type SearchSpec = z.infer<typeof searchSchema>;
// Top-level request DTO shape. The row controller DTO composes this.
export const listQuerySchema = z.object({
  filter: filterGroupSchema.optional(),
  sorts: sortsSchema.optional(),
  search: searchSchema.optional(),
});
export type ListQuery = z.infer<typeof listQuerySchema>;
@@ -0,0 +1,27 @@
import { Expression, ExpressionBuilder, sql, SqlBool } from 'kysely';
import { DB } from '@docmost/db/types/db';
import { SearchSpec } from './schema.zod';
type Eb = ExpressionBuilder<DB, 'baseRows'>;
/*
* `search_text` and `search_tsv` are maintained by the base_rows search
* trigger installed in the bases-hardening migration. Both columns are
* indexed — pg_trgm GIN for ILIKE and standard GIN for tsvector.
*/
/*
 * Compile a search spec into one boolean SQL expression over the
 * trigger-maintained search_text / search_tsv columns. A blank query is
 * vacuously TRUE. The `eb` parameter is unused but kept for signature
 * parity with the other builders.
 */
export function buildSearch(eb: Eb, spec: SearchSpec): Expression<SqlBool> {
  const query = spec.query.trim();
  if (query.length === 0) {
    return sql<SqlBool>`TRUE`;
  }
  if (spec.mode === 'fts') {
    // Accent-insensitive match via f_unaccent (same helper the search
    // trigger uses when populating search_tsv / search_text).
    return sql<SqlBool>`search_tsv @@ plainto_tsquery('english', f_unaccent(${query}))`;
  }
  // trigram ILIKE mode (default). Escape %/_/\ in user input so wildcards
  // can't be injected (same escaping as extractors.escapeIlike).
  const pattern = '%' + query.replace(/[%_\\]/g, '\\$&') + '%';
  return sql<SqlBool>`search_text ILIKE ${pattern}`;
}
+112
View File
@@ -0,0 +1,112 @@
import { RawBuilder, sql } from 'kysely';
import { BaseProperty } from '@docmost/db/types/entity.types';
import { SortSpec } from './schema.zod';
import { PropertyKind, SYSTEM_COLUMN, propertyKind } from './kinds';
import {
boolCell,
dateCell,
numericCell,
textCell,
} from './extractors';
import { PropertySchema } from './predicate';
/*
* Builds sort expressions with sentinel wrapping so NULLs compare
* deterministically at the end of the sort order. This avoids the
* `__null__` string sentinel bug in the old cursor encoder: because the
* sort expression never returns NULL, the cursor simply stores the
* extracted value and keyset comparisons work natively.
*/
// One compiled sort: the aliased SQL expression plus the metadata the
// cursor codec needs to serialise its values.
export type SortBuild = {
  key: string; // alias used in cursor (s0, s1, ...)
  expression: RawBuilder<any>; // COALESCE-wrapped expression with sentinel
  direction: 'asc' | 'desc';
  valueType: 'numeric' | 'date' | 'text' | 'bool';
};
// Deterministic tiebreaker columns appended after all user sorts.
export type TailKey = 'position' | 'id';
export const CURSOR_TAIL_KEYS: TailKey[] = ['position', 'id'];
/*
 * Compile user sort specs into SortBuilds. Sorts on unknown properties are
 * silently skipped (their index is still consumed, so keys stay aligned
 * with the incoming specs). System-column sorts use the raw column; cell
 * sorts get sentinel wrapping via wrapWithSentinel.
 */
export function buildSorts(
  sorts: SortSpec[],
  schema: PropertySchema,
): SortBuild[] {
  const out: SortBuild[] = [];
  for (let i = 0; i < sorts.length; i++) {
    const s = sorts[i];
    const prop = schema.get(s.propertyId);
    if (!prop) continue;
    const key = `s${i}`;
    const dir = s.direction;
    const sysCol = SYSTEM_COLUMN[prop.type];
    if (sysCol) {
      out.push({
        key,
        expression: sql`${sql.ref(sysCol)}`,
        direction: dir,
        // lastUpdatedById is a uuid (text ordering); createdAt/updatedAt are
        // timestamptz. Keyed off the mapped column name rather than the
        // previous hard-coded 'lastEditedBy' string literal, so it cannot
        // drift from SYSTEM_COLUMN / BasePropertyType.
        valueType: sysCol === 'lastUpdatedById' ? 'text' : 'date',
      });
      continue;
    }
    const kind = propertyKind(prop.type);
    if (!kind) continue;
    out.push(wrapWithSentinel(s.propertyId, kind, dir, key));
  }
  return out;
}
/*
 * Wrap a cell extractor in COALESCE with a direction-aware sentinel so the
 * sort expression never yields SQL NULL: NULL rows always collapse to the
 * end of the requested order, and cursor keyset comparisons stay total.
 */
function wrapWithSentinel(
  propertyId: string,
  kind: Exclude<ReturnType<typeof propertyKind>, null>,
  direction: 'asc' | 'desc',
  key: string,
): SortBuild {
  const ascending = direction === 'asc';
  switch (kind) {
    case PropertyKind.NUMERIC: {
      // ±Infinity sorts NULLs after every finite value.
      const sentinel = ascending
        ? sql`'Infinity'::numeric`
        : sql`'-Infinity'::numeric`;
      return {
        key,
        expression: sql`COALESCE(${numericCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'numeric',
      };
    }
    case PropertyKind.DATE: {
      // timestamptz has native ±infinity literals.
      const sentinel = ascending
        ? sql`'infinity'::timestamptz`
        : sql`'-infinity'::timestamptz`;
      return {
        key,
        expression: sql`COALESCE(${dateCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'date',
      };
    }
    case PropertyKind.BOOL: {
      // false < true. ASC NULLS LAST => null → true; DESC NULLS LAST => null → false.
      const sentinel = ascending ? sql`TRUE` : sql`FALSE`;
      return {
        key,
        expression: sql`COALESCE(${boolCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'bool',
      };
    }
    default: {
      // TEXT / SELECT / MULTI / PERSON / FILE — sort by raw extracted text.
      // chr(1114111) is the max code point, so NULLs land after all text ASC.
      const sentinel = ascending ? sql`chr(1114111)` : sql`''`;
      return {
        key,
        expression: sql`COALESCE(${textCell(propertyId)}, ${sentinel})`,
        direction,
        valueType: 'text',
      };
    }
  }
}
@@ -0,0 +1,47 @@
import { BaseProperty, BaseRow, BaseView } from '@docmost/db/types/entity.types';
/*
* Domain event payloads emitted by the base services after each mutation
* commits. `base-ws-consumers.ts` picks these up and fans them out onto
* the appropriate socket.io room. `requestId` lets the originating client
* skip replaying its own echo.
*/
// Fields shared by every base domain event; requestId lets the originating
// client recognise and skip its own echo.
type BaseEventBase = {
  baseId: string;
  workspaceId: string;
  actorId?: string | null;
  requestId?: string | null;
};
// Row lifecycle: create carries the whole row; update carries only the
// changed row fields (patch) plus the changed cell values.
export type BaseRowCreatedEvent = BaseEventBase & { row: BaseRow };
export type BaseRowUpdatedEvent = BaseEventBase & {
  rowId: string;
  patch: Record<string, unknown>;
  updatedCells: Record<string, unknown>;
};
export type BaseRowDeletedEvent = BaseEventBase & { rowId: string };
export type BaseRowRestoredEvent = BaseEventBase & { rowId: string };
// `position` is the row's new fractional-index key.
export type BaseRowReorderedEvent = BaseEventBase & {
  rowId: string;
  position: string;
};
// Property (column) lifecycle; updates also carry the base's new
// schemaVersion so clients can invalidate stale row caches.
export type BasePropertyCreatedEvent = BaseEventBase & {
  property: BaseProperty;
};
export type BasePropertyUpdatedEvent = BaseEventBase & {
  property: BaseProperty;
  schemaVersion: number;
};
export type BasePropertyDeletedEvent = BaseEventBase & { propertyId: string };
export type BasePropertyReorderedEvent = BaseEventBase & {
  propertyId: string;
  position: string;
};
// View lifecycle.
export type BaseViewCreatedEvent = BaseEventBase & { view: BaseView };
export type BaseViewUpdatedEvent = BaseEventBase & { view: BaseView };
export type BaseViewDeletedEvent = BaseEventBase & { viewId: string };
// Emitted after async jobs rewrite cells; signals "refetch rows".
export type BaseSchemaBumpedEvent = BaseEventBase & { schemaVersion: number };
@@ -0,0 +1,177 @@
import { Logger, OnModuleDestroy } from '@nestjs/common';
import { OnWorkerEvent, Processor, WorkerHost } from '@nestjs/bullmq';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Job } from 'bullmq';
import { InjectKysely } from 'nestjs-kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { executeTx } from '@docmost/db/utils';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import {
IBaseCellGcJob,
IBaseTypeConversionJob,
} from '../../../integrations/queue/constants/queue.interface';
import { processBaseTypeConversion } from '../tasks/base-type-conversion.task';
import { processBaseCellGc } from '../tasks/base-cell-gc.task';
import { EventName } from '../../../common/events/event.contants';
import {
BasePropertyUpdatedEvent,
BaseSchemaBumpedEvent,
} from '../events/base-events';
/*
 * BullMQ worker for the base queue. Handles long-running column type
 * conversions and cell garbage collection, then emits domain events so
 * connected clients reconcile (see base-ws-consumers).
 */
@Processor(QueueName.BASE_QUEUE)
export class BaseQueueProcessor
  extends WorkerHost
  implements OnModuleDestroy
{
  private readonly logger = new Logger(BaseQueueProcessor.name);
  constructor(
    @InjectKysely() private readonly db: KyselyDB,
    private readonly baseRowRepo: BaseRowRepo,
    private readonly basePropertyRepo: BasePropertyRepo,
    private readonly baseRepo: BaseRepo,
    private readonly eventEmitter: EventEmitter2,
  ) {
    super();
  }
  // Dispatch by job name; unknown jobs are logged and ignored (no throw,
  // so they complete rather than retry forever).
  async process(job: Job): Promise<unknown> {
    switch (job.name) {
      case QueueJob.BASE_TYPE_CONVERSION: {
        const data = job.data as IBaseTypeConversionJob;
        // Cell rewrite + pending→live swap + schema_version bump share one
        // transaction so readers never see cells already in the new format
        // under a still-pending type (or vice versa).
        const { summary, schemaVersion } = await executeTx(
          this.db,
          async (trx) => {
            // NOTE(review): the task receives this.db while also being
            // handed `trx` in its options — presumably it writes via trx;
            // confirm it does not read outside the transaction.
            const s = await processBaseTypeConversion(
              this.db,
              this.baseRowRepo,
              data,
              {
                trx,
                progress: (processed) => job.updateProgress({ processed }),
              },
            );
            await this.basePropertyRepo.commitPendingTypeChange(
              data.propertyId,
              trx,
            );
            await this.basePropertyRepo.bumpSchemaVersion(data.propertyId, trx);
            const v = await this.baseRepo.bumpSchemaVersion(data.baseId, trx);
            return { summary: s, schemaVersion: v };
          },
        );
        // Emit the property:updated first so clients drop the "Converting…"
        // badge and repaint headers with the new type, then schema:bumped
        // so they invalidate row caches to pick up migrated cells.
        const updated = await this.basePropertyRepo.findById(data.propertyId);
        if (updated) {
          const event: BasePropertyUpdatedEvent = {
            baseId: data.baseId,
            workspaceId: data.workspaceId,
            actorId: data.actorId ?? null,
            requestId: null,
            property: updated,
            schemaVersion: updated.schemaVersion,
          };
          this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
        }
        this.emitSchemaBumped(
          data.baseId,
          data.workspaceId,
          schemaVersion,
          data.actorId,
        );
        return summary;
      }
      case QueueJob.BASE_CELL_GC: {
        const data = job.data as IBaseCellGcJob;
        await processBaseCellGc(
          this.db,
          this.baseRowRepo,
          this.basePropertyRepo,
          data,
        );
        // Bump outside any transaction; GC only removes orphaned cell data,
        // so readers racing the bump see at worst already-valid cells.
        const schemaVersion = await this.baseRepo.bumpSchemaVersion(
          data.baseId,
        );
        this.emitSchemaBumped(data.baseId, data.workspaceId, schemaVersion);
        return;
      }
      default:
        this.logger.warn(`Unknown job: ${job.name}`);
    }
  }
  // Fan out a schema:bumped event; requestId is always null because these
  // originate from background jobs, not a client request.
  private emitSchemaBumped(
    baseId: string,
    workspaceId: string,
    schemaVersion: number,
    actorId?: string,
  ): void {
    const event: BaseSchemaBumpedEvent = {
      baseId,
      workspaceId,
      actorId: actorId ?? null,
      requestId: null,
      schemaVersion,
    };
    this.eventEmitter.emit(EventName.BASE_SCHEMA_BUMPED, event);
  }
  @OnWorkerEvent('active')
  onActive(job: Job) {
    this.logger.debug(`Processing ${job.name} job ${job.id}`);
  }
  // NOTE(review): BullMQ can invoke 'failed' listeners with an undefined
  // job in some failure modes — confirm before dereferencing job.name here.
  @OnWorkerEvent('failed')
  async onError(job: Job) {
    this.logger.error(
      `Error processing ${job.name} job ${job.id}. Reason: ${job.failedReason}`,
    );
    // Clean up a stuck conversion so the column doesn't wedge in
    // "Converting…" forever. Cells remain under the original type because
    // the rewrite transaction rolled back.
    if (job.name === QueueJob.BASE_TYPE_CONVERSION) {
      const data = job.data as IBaseTypeConversionJob;
      try {
        await this.basePropertyRepo.clearPendingTypeChange(data.propertyId);
        const reverted = await this.basePropertyRepo.findById(data.propertyId);
        if (reverted) {
          const event: BasePropertyUpdatedEvent = {
            baseId: data.baseId,
            workspaceId: data.workspaceId,
            actorId: data.actorId ?? null,
            requestId: null,
            property: reverted,
            schemaVersion: reverted.schemaVersion,
          };
          this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
        }
      } catch (cleanupErr) {
        // Cleanup is best-effort; log and let the failure stand.
        this.logger.error(
          `Failed to clear pending type change on property ${data.propertyId}`,
          cleanupErr as Error,
        );
      }
    }
  }
  @OnWorkerEvent('completed')
  onCompleted(job: Job) {
    this.logger.debug(`Completed ${job.name} job ${job.id}`);
  }
  // Close the underlying BullMQ worker on shutdown so in-flight jobs
  // finish and the Redis connection is released.
  async onModuleDestroy(): Promise<void> {
    if (this.worker) {
      await this.worker.close();
    }
  }
}
@@ -0,0 +1,74 @@
import { Injectable, Logger } from '@nestjs/common';
import { RedisService } from '@nestjs-labs/nestjs-ioredis';
import type { Redis } from 'ioredis';
const PRESENCE_KEY_PREFIX = 'presence:base:';
const PRESENCE_ENTRY_TTL_MS = 10_000;
const PRESENCE_KEY_TTL_S = 60;
export type PresenceEntry = {
userId: string;
cellId?: string | null;
selection?: unknown;
ts: number;
};
/*
* Ephemeral per-base presence. No DB. `presence:base:{baseId}` is a Redis
* HASH keyed by userId with a JSON-serialised entry. Entries older than
* PRESENCE_ENTRY_TTL_MS are filtered on read; the key itself is refreshed
* with a longer Redis EXPIRE on every write so unused rooms drain on
* their own.
*/
/*
 * Ephemeral per-base presence backed by a Redis HASH keyed by userId.
 * Writes refresh a coarse key-level EXPIRE; reads filter out entries older
 * than PRESENCE_ENTRY_TTL_MS and opportunistically delete them.
 */
@Injectable()
export class BasePresenceService {
  private readonly logger = new Logger(BasePresenceService.name);
  private readonly redis: Redis;
  constructor(private readonly redisService: RedisService) {
    this.redis = this.redisService.getOrThrow();
  }
  /** Upsert one user's presence entry and refresh the room key's TTL. */
  async setPresence(
    baseId: string,
    entry: PresenceEntry,
  ): Promise<void> {
    const key = PRESENCE_KEY_PREFIX + baseId;
    // HSET + EXPIRE in one MULTI so the TTL refresh rides along atomically.
    await this.redis
      .multi()
      .hset(key, entry.userId, JSON.stringify(entry))
      .expire(key, PRESENCE_KEY_TTL_S)
      .exec();
  }
  /** Drop one user's entry immediately (explicit leave). */
  async leave(baseId: string, userId: string): Promise<void> {
    const key = PRESENCE_KEY_PREFIX + baseId;
    await this.redis.hdel(key, userId);
  }
  /** Return all live entries for a base, pruning stale/corrupt ones. */
  async snapshot(baseId: string): Promise<PresenceEntry[]> {
    const key = PRESENCE_KEY_PREFIX + baseId;
    const stored = await this.redis.hgetall(key);
    const cutoff = Date.now() - PRESENCE_ENTRY_TTL_MS;
    const live: PresenceEntry[] = [];
    const expired: string[] = [];
    for (const [userId, json] of Object.entries(stored)) {
      let entry: PresenceEntry | undefined;
      try {
        entry = JSON.parse(json) as PresenceEntry;
      } catch {
        // Corrupt value: treat as stale and prune below.
      }
      if (entry !== undefined && entry.ts >= cutoff) {
        live.push(entry);
      } else {
        expired.push(userId);
      }
    }
    // Opportunistic GC so the hash doesn't accumulate during long-lived
    // rooms where the key TTL keeps getting refreshed by active users.
    if (expired.length > 0) {
      this.redis.hdel(key, ...expired).catch(() => {});
    }
    return live;
  }
}
@@ -0,0 +1,165 @@
import { Injectable, Logger } from '@nestjs/common';
import { OnEvent } from '@nestjs/event-emitter';
import { EventName } from '../../../common/events/event.contants';
import { BaseWsService } from './base-ws.service';
import {
BasePropertyCreatedEvent,
BasePropertyDeletedEvent,
BasePropertyReorderedEvent,
BasePropertyUpdatedEvent,
BaseRowCreatedEvent,
BaseRowDeletedEvent,
BaseRowReorderedEvent,
BaseRowUpdatedEvent,
BaseSchemaBumpedEvent,
BaseViewCreatedEvent,
BaseViewDeletedEvent,
BaseViewUpdatedEvent,
} from '../events/base-events';
/*
* In-process listeners that forward base domain events onto the
* `base-{baseId}` socket.io room. Originating clients suppress their own
* echoes via `requestId`.
*/
@Injectable()
export class BaseWsConsumers {
  private readonly logger = new Logger(BaseWsConsumers.name);

  constructor(private readonly ws: BaseWsService) {}

  /** Row inserted: forwards the full row so clients can splice it into caches. */
  @OnEvent(EventName.BASE_ROW_CREATED)
  onRowCreated(e: BaseRowCreatedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:row:created',
      baseId: e.baseId,
      row: e.row,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** Cell edit: forwards both the raw patch and the validated cell values. */
  @OnEvent(EventName.BASE_ROW_UPDATED)
  onRowUpdated(e: BaseRowUpdatedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:row:updated',
      baseId: e.baseId,
      rowId: e.rowId,
      patch: e.patch,
      updatedCells: e.updatedCells,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** Row removed: id-only payload; clients evict from their caches. */
  @OnEvent(EventName.BASE_ROW_DELETED)
  onRowDeleted(e: BaseRowDeletedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:row:deleted',
      baseId: e.baseId,
      rowId: e.rowId,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** Row moved: forwards the new fractional-index position. */
  @OnEvent(EventName.BASE_ROW_REORDERED)
  onRowReordered(e: BaseRowReorderedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:row:reordered',
      baseId: e.baseId,
      rowId: e.rowId,
      position: e.position,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** Property (column) created: forwards the full property record. */
  @OnEvent(EventName.BASE_PROPERTY_CREATED)
  onPropertyCreated(e: BasePropertyCreatedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:property:created',
      baseId: e.baseId,
      property: e.property,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /**
   * Property updated: includes schemaVersion so clients can detect a
   * stale row cache after a type change and refetch.
   */
  @OnEvent(EventName.BASE_PROPERTY_UPDATED)
  onPropertyUpdated(e: BasePropertyUpdatedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:property:updated',
      baseId: e.baseId,
      property: e.property,
      schemaVersion: e.schemaVersion,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** Property removed: id-only payload. */
  @OnEvent(EventName.BASE_PROPERTY_DELETED)
  onPropertyDeleted(e: BasePropertyDeletedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:property:deleted',
      baseId: e.baseId,
      propertyId: e.propertyId,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** Property moved: forwards the new fractional-index position. */
  @OnEvent(EventName.BASE_PROPERTY_REORDERED)
  onPropertyReordered(e: BasePropertyReorderedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:property:reordered',
      baseId: e.baseId,
      propertyId: e.propertyId,
      position: e.position,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** View created: forwards the full view record. */
  @OnEvent(EventName.BASE_VIEW_CREATED)
  onViewCreated(e: BaseViewCreatedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:view:created',
      baseId: e.baseId,
      view: e.view,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** View updated: forwards the full view record. */
  @OnEvent(EventName.BASE_VIEW_UPDATED)
  onViewUpdated(e: BaseViewUpdatedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:view:updated',
      baseId: e.baseId,
      view: e.view,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /** View removed: id-only payload. */
  @OnEvent(EventName.BASE_VIEW_DELETED)
  onViewDeleted(e: BaseViewDeletedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:view:deleted',
      baseId: e.baseId,
      viewId: e.viewId,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }

  /**
   * Schema version bump (emitted e.g. after an inline type conversion).
   *
   * Fix: now forwards actorId/requestId like every other handler. The
   * event carries both fields, and without requestId the originating
   * client could not suppress its own echo — contradicting this file's
   * header contract — causing a redundant refetch after its own edit.
   */
  @OnEvent(EventName.BASE_SCHEMA_BUMPED)
  onSchemaBumped(e: BaseSchemaBumpedEvent) {
    this.ws.emitToBase(e.baseId, {
      operation: 'base:schema:bumped',
      baseId: e.baseId,
      schemaVersion: e.schemaVersion,
      actorId: e.actorId ?? null,
      requestId: e.requestId ?? null,
    });
  }
}
@@ -0,0 +1,233 @@
import { Injectable, Logger } from '@nestjs/common';
import { z } from 'zod';
import type { Server, Socket } from 'socket.io';
import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { findHighestUserSpaceRole } from '@docmost/db/repos/space/utils';
import { getBaseRoomName } from '../../../ws/ws.utils';
import { BasePresenceService, PresenceEntry } from './base-presence.service';
/*
* Inbound shapes from untrusted socket clients. Zod-validated at the
* boundary so malformed payloads (non-uuid baseId, missing fields,
* oversized selection blobs) never reach the permission check or Redis.
*/
const baseSubscribeSchema = z.object({
  operation: z.literal('base:subscribe'),
  baseId: z.uuid(),
});

const baseUnsubscribeSchema = z.object({
  operation: z.literal('base:unsubscribe'),
  baseId: z.uuid(),
});

// Upper bound on the JSON-serialized size of a presence `selection` blob.
// Previously `z.unknown()` accepted payloads of any size, so the boundary
// comment above ("oversized selection blobs never reach … Redis") did not
// actually hold — a hostile client could bloat the per-base presence hash.
const MAX_SELECTION_JSON_CHARS = 8_192;

const basePresenceSchema = z.object({
  operation: z.literal('base:presence'),
  baseId: z.uuid(),
  cellId: z.string().max(200).optional().nullable(),
  // Opaque client-defined selection state; bounded by serialized size.
  selection: z
    .unknown()
    .optional()
    .refine(
      (value) => {
        if (value === undefined || value === null) return true;
        try {
          const json = JSON.stringify(value);
          // `json` is undefined for non-serializable values (e.g. a bare
          // function); those stringify to nothing and are harmless.
          return json === undefined || json.length <= MAX_SELECTION_JSON_CHARS;
        } catch {
          // Circular or otherwise unserializable input — reject.
          return false;
        }
      },
      { message: 'selection payload too large' },
    ),
});

const basePresenceLeaveSchema = z.object({
  operation: z.literal('base:presence:leave'),
  baseId: z.uuid(),
});

const inboundSchema = z.union([
  baseSubscribeSchema,
  baseUnsubscribeSchema,
  basePresenceSchema,
  basePresenceLeaveSchema,
]);

type BaseInbound = z.infer<typeof inboundSchema>;
type BaseOutbound = { operation: `base:${string}` } & Record<string, unknown>;
@Injectable()
export class BaseWsService {
  private readonly logger = new Logger(BaseWsService.name);

  // Set by the gateway via setServer(); null until then, so emitToBase()
  // silently no-ops before the gateway has booted.
  private server: Server | null = null;

  constructor(
    private readonly baseRepo: BaseRepo,
    private readonly spaceMemberRepo: SpaceMemberRepo,
    private readonly presence: BasePresenceService,
  ) {}

  /** Wires in the socket.io server instance so this service can emit to rooms. */
  setServer(server: Server): void {
    this.server = server;
  }

  /** Cheap routing predicate: does this raw payload look like a `base:*` event? */
  isBaseEvent(data: any): boolean {
    return (
      typeof data?.operation === 'string' && data.operation.startsWith('base:')
    );
  }

  /**
   * Entry point for all inbound base socket messages. Zod-validates the
   * payload first; malformed messages are logged at debug level and
   * dropped without replying to the client.
   */
  async handleInbound(client: Socket, raw: unknown): Promise<void> {
    const parsed = inboundSchema.safeParse(raw);
    if (!parsed.success) {
      this.logger.debug(
        `Rejecting inbound base event: ${parsed.error.issues[0]?.message}`,
      );
      return;
    }
    const data = parsed.data;
    switch (data.operation) {
      case 'base:subscribe':
        await this.subscribe(client, data.baseId);
        return;
      case 'base:unsubscribe':
        await this.unsubscribe(client, data.baseId);
        return;
      case 'base:presence':
        await this.handlePresence(client, data);
        return;
      case 'base:presence:leave':
        await this.handlePresenceLeave(client, data.baseId);
        return;
    }
  }

  /** Broadcasts a payload to every socket in the base's room (no-op pre-boot). */
  emitToBase(baseId: string, payload: BaseOutbound): void {
    if (!this.server) return;
    this.server.to(getBaseRoomName(baseId)).emit('message', payload);
  }

  /*
   * Called from WsGateway on client disconnect. Walks the per-socket
   * set of subscribed bases and cleans up presence without waiting for
   * entry TTLs to expire — keeps the snapshot fresh for others in the
   * room.
   */
  async handleDisconnect(client: Socket): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    const subs = this.subscriptionsFor(client);
    if (!userId || subs.size === 0) return;
    // Sequential on purpose: each leave is a single Redis HDEL plus one
    // room broadcast, and the set is small (one entry per open base tab).
    for (const baseId of subs) {
      await this.presence.leave(baseId, userId);
      this.emitToBase(baseId, {
        operation: 'base:presence:leave',
        baseId,
        userId,
      });
    }
    subs.clear();
  }

  // --- private -------------------------------------------------------

  /**
   * Joins the client to the base room after three gates: an authenticated
   * userId on the socket, an existing base, and space-level read access.
   * Each failure replies with a distinct `base:subscribe:error` reason.
   */
  private async subscribe(client: Socket, baseId: string): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) {
      client.emit('message', {
        operation: 'base:subscribe:error',
        baseId,
        reason: 'unauthenticated',
      });
      return;
    }
    const base = await this.baseRepo.findById(baseId);
    if (!base) {
      client.emit('message', {
        operation: 'base:subscribe:error',
        baseId,
        reason: 'not_found',
      });
      return;
    }
    const canRead = await this.canReadBaseSpace(userId, base.spaceId);
    if (!canRead) {
      client.emit('message', {
        operation: 'base:subscribe:error',
        baseId,
        reason: 'forbidden',
      });
      return;
    }
    // NOTE(review): socket.io `join` can return a promise with some
    // adapters; not awaited here — confirm ordering vs. the snapshot
    // emit below is acceptable.
    client.join(getBaseRoomName(baseId));
    this.subscriptionsFor(client).add(baseId);
    // Send the current presence snapshot to just this client so their UI
    // can paint who's already editing what.
    const snapshot = await this.presence.snapshot(baseId);
    client.emit('message', {
      operation: 'base:presence:snapshot',
      baseId,
      entries: snapshot,
    });
  }

  /**
   * Leaves the room, removes the socket-local subscription, clears the
   * user's presence entry, and tells the remaining room members.
   */
  private async unsubscribe(client: Socket, baseId: string): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) return;
    client.leave(getBaseRoomName(baseId));
    this.subscriptionsFor(client).delete(baseId);
    await this.presence.leave(baseId, userId);
    this.emitToBase(baseId, {
      operation: 'base:presence:leave',
      baseId,
      userId,
    });
  }

  /**
   * Stores the user's cursor/selection in Redis and fans it out to the
   * room. Silently ignored unless the socket has already passed the
   * subscribe() permission gate (checked via room membership), so this
   * path never needs its own DB lookups.
   */
  private async handlePresence(
    client: Socket,
    data: Extract<BaseInbound, { operation: 'base:presence' }>,
  ): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) return;
    if (!client.rooms.has(getBaseRoomName(data.baseId))) return;
    const entry: PresenceEntry = {
      userId,
      cellId: data.cellId ?? null,
      selection: data.selection ?? null,
      ts: Date.now(),
    };
    await this.presence.setPresence(data.baseId, entry);
    this.emitToBase(data.baseId, {
      operation: 'base:presence',
      baseId: data.baseId,
      ...entry,
    });
  }

  /** Explicit leave (e.g. tab blur): clear presence and notify the room. */
  private async handlePresenceLeave(
    client: Socket,
    baseId: string,
  ): Promise<void> {
    const userId = client.data?.userId as string | undefined;
    if (!userId) return;
    await this.presence.leave(baseId, userId);
    this.emitToBase(baseId, {
      operation: 'base:presence:leave',
      baseId,
      userId,
    });
  }

  /**
   * Read access = holding any role in the base's space (the highest-role
   * lookup is only used as a truthy membership check here).
   */
  private async canReadBaseSpace(
    userId: string,
    spaceId: string,
  ): Promise<boolean> {
    const roles = await this.spaceMemberRepo.getUserSpaceRoles(userId, spaceId);
    return !!findHighestUserSpaceRole(roles);
  }

  /** Lazily initializes the per-socket set of subscribed base ids. */
  private subscriptionsFor(client: Socket): Set<string> {
    const existing = client.data.baseSubscriptions as Set<string> | undefined;
    if (existing) return existing;
    const fresh = new Set<string>();
    client.data.baseSubscriptions = fresh;
    return fresh;
  }
}
@@ -1,13 +1,21 @@
import { import {
BadRequestException, BadRequestException,
ConflictException,
Injectable, Injectable,
Logger,
NotFoundException, NotFoundException,
ServiceUnavailableException,
} from '@nestjs/common'; } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely'; import { InjectKysely } from 'nestjs-kysely';
import { InjectQueue } from '@nestjs/bullmq';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { Queue } from 'bullmq';
import { sql, SqlBool } from 'kysely';
import { KyselyDB } from '@docmost/db/types/kysely.types'; import { KyselyDB } from '@docmost/db/types/kysely.types';
import { executeTx } from '@docmost/db/utils'; import { executeTx } from '@docmost/db/utils';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo'; import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo'; import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BaseRepo } from '@docmost/db/repos/base/base.repo';
import { CreatePropertyDto } from '../dto/create-property.dto'; import { CreatePropertyDto } from '../dto/create-property.dto';
import { import {
UpdatePropertyDto, UpdatePropertyDto,
@@ -15,49 +23,135 @@ import {
ReorderPropertyDto, ReorderPropertyDto,
} from '../dto/update-property.dto'; } from '../dto/update-property.dto';
import { import {
BasePropertyType,
BasePropertyTypeValue, BasePropertyTypeValue,
parseTypeOptions, parseTypeOptions,
attemptCellConversion,
validateTypeOptions, validateTypeOptions,
isSystemPropertyType, isSystemPropertyType,
} from '../base.schemas'; } from '../base.schemas';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered'; import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { QueueJob, QueueName } from '../../../integrations/queue/constants';
import {
IBaseCellGcJob,
IBaseTypeConversionJob,
} from '../../../integrations/queue/constants/queue.interface';
import { EventName } from '../../../common/events/event.contants';
import {
BasePropertyCreatedEvent,
BasePropertyDeletedEvent,
BasePropertyReorderedEvent,
BasePropertyUpdatedEvent,
BaseSchemaBumpedEvent,
} from '../events/base-events';
import { processBaseTypeConversion } from '../tasks/base-type-conversion.task';
/*
* Types whose cell values are IDs referencing external records. Converting
* them to any other type (especially text) requires an ID → display
* resolution pass — otherwise `select → text` persists the choice UUID
* instead of its display name (the bug that motivated this job).
*/
const ID_REFERENCING_TYPES: ReadonlySet<BasePropertyTypeValue> = new Set([
BasePropertyType.SELECT,
BasePropertyType.STATUS,
BasePropertyType.MULTI_SELECT,
BasePropertyType.PERSON,
BasePropertyType.FILE,
]);
/*
* Row-count cutoff below which the cell rewrite runs synchronously inside
* the HTTP request. Chosen so even worst-case (file → text with attachment
* name joins) completes comfortably under the default 30s request timeout.
* Larger bases fall back to the BullMQ worker path which flips the type
* only after the rewrite completes, showing a "Converting…" header state
* in the meantime.
*/
const INLINE_CONVERSION_ROW_LIMIT = 2000;
@Injectable() @Injectable()
export class BasePropertyService { export class BasePropertyService {
private readonly logger = new Logger(BasePropertyService.name);
constructor( constructor(
@InjectKysely() private readonly db: KyselyDB, @InjectKysely() private readonly db: KyselyDB,
private readonly basePropertyRepo: BasePropertyRepo, private readonly basePropertyRepo: BasePropertyRepo,
private readonly baseRowRepo: BaseRowRepo, private readonly baseRowRepo: BaseRowRepo,
private readonly baseRepo: BaseRepo,
@InjectQueue(QueueName.BASE_QUEUE) private readonly baseQueue: Queue,
private readonly eventEmitter: EventEmitter2,
) {} ) {}
async create(workspaceId: string, dto: CreatePropertyDto) { async create(workspaceId: string, dto: CreatePropertyDto, actorId?: string) {
const type = dto.type as BasePropertyTypeValue; const type = dto.type as BasePropertyTypeValue;
let validatedTypeOptions = null; const validatedTypeOptions = dto.typeOptions
? parseTypeOptionsOrThrow(type, dto.typeOptions)
if (dto.typeOptions) { : parseTypeOptionsOrThrow(type, {});
validatedTypeOptions = parseTypeOptions(type, dto.typeOptions);
} else {
validatedTypeOptions = parseTypeOptions(type, {});
}
const lastPosition = await this.basePropertyRepo.getLastPosition( const lastPosition = await this.basePropertyRepo.getLastPosition(
dto.baseId, dto.baseId,
); );
const position = generateJitteredKeyBetween(lastPosition, null); const position = generateJitteredKeyBetween(lastPosition, null);
return this.basePropertyRepo.insertProperty({ const created = await executeTx(this.db, async (trx) => {
baseId: dto.baseId, const row = await this.basePropertyRepo.insertProperty(
name: dto.name, {
type: dto.type, baseId: dto.baseId,
position, name: dto.name,
typeOptions: validatedTypeOptions as any, type: dto.type,
workspaceId, position,
typeOptions: validatedTypeOptions as any,
workspaceId,
},
trx,
);
await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
return row;
}); });
const event: BasePropertyCreatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: null,
property: created,
};
this.eventEmitter.emit(EventName.BASE_PROPERTY_CREATED, event);
return created;
} }
async update(dto: UpdatePropertyDto) { /*
* Metadata update. Three paths:
*
* - Coercion-safe (number↔text, text↔url, etc.): flip the type/type
* options in one transaction, bump schema_version, return. The engine
* reads cells through schema-on-read extractors so no cell rewrite is
* needed.
* - ID-referencing or system-involving conversion with a small number
* of rows: run the cell rewrite, flip the type, and bump
* schema_version all in one transaction — the HTTP request waits but
* nobody ever sees raw IDs under the new type.
* - Same kind of conversion on a large base: stage the target type on
* `pendingType` / `pendingTypeOptions`, keep the live `type` as-is,
* enqueue the worker. Clients render under the old type (so cells
* resolve to display names, not UUIDs) and show a "Converting…"
* badge until the worker transaction commits and swaps the pending
* pair onto `type`.
*/
async update(
dto: UpdatePropertyDto,
workspaceId: string,
actorId?: string,
) {
const t0 = Date.now();
const tick = (label: string) =>
this.logger.log(
`property-update ${dto.propertyId} ${label}=${Date.now() - t0}ms`,
);
const property = await this.basePropertyRepo.findById(dto.propertyId); const property = await this.basePropertyRepo.findById(dto.propertyId);
tick('after-findById');
if (!property) { if (!property) {
throw new NotFoundException('Property not found'); throw new NotFoundException('Property not found');
} }
@@ -66,56 +160,218 @@ export class BasePropertyService {
throw new BadRequestException('Property does not belong to this base'); throw new BadRequestException('Property does not belong to this base');
} }
// Block concurrent type changes — the worker still owns the previous
// conversion, and letting a second one through would race on `type`.
if (property.pendingType) {
throw new ConflictException(
'A type conversion is already in progress for this property',
);
}
const isTypeChange = dto.type && dto.type !== property.type; const isTypeChange = dto.type && dto.type !== property.type;
const oldType = property.type as BasePropertyTypeValue;
const oldTypeOptions = property.typeOptions;
const newType = (dto.type ?? property.type) as BasePropertyTypeValue; const newType = (dto.type ?? property.type) as BasePropertyTypeValue;
let validatedTypeOptions = property.typeOptions; let validatedTypeOptions = property.typeOptions;
if (dto.typeOptions !== undefined) { if (dto.typeOptions !== undefined) {
validatedTypeOptions = parseTypeOptions(newType, dto.typeOptions) as any; validatedTypeOptions = parseTypeOptionsOrThrow(
newType,
dto.typeOptions,
) as any;
} else if (isTypeChange) { } else if (isTypeChange) {
const result = validateTypeOptions(newType, {}); const result = validateTypeOptions(newType, {});
validatedTypeOptions = result.success ? (result.data as any) : null; validatedTypeOptions = result.success ? (result.data as any) : null;
} }
let conversionSummary: { const involvesSystem =
converted: number; isSystemPropertyType(oldType) || isSystemPropertyType(newType);
cleared: number; const needsIdResolution = ID_REFERENCING_TYPES.has(oldType);
total: number; const needsCellRewrite =
} | null = null; isTypeChange && (involvesSystem || needsIdResolution);
if (isTypeChange) { // --- Path 1: no cell rewrite needed ---------------------------------
const involvesSystem = if (!needsCellRewrite) {
isSystemPropertyType(property.type) || isSystemPropertyType(newType); await executeTx(this.db, async (trx) => {
await this.basePropertyRepo.updateProperty(
if (involvesSystem) {
conversionSummary = await this.clearCellValues(
dto.baseId,
dto.propertyId, dto.propertyId,
{
...(dto.name !== undefined && { name: dto.name }),
...(dto.type !== undefined && { type: dto.type }),
typeOptions: validatedTypeOptions,
},
trx,
); );
} else { if (isTypeChange) {
conversionSummary = await this.convertCellValues( await this.basePropertyRepo.bumpSchemaVersion(dto.propertyId, trx);
dto.baseId, await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
dto.propertyId, }
property.type as BasePropertyTypeValue, });
newType, return this.loadAndEmit(dto, workspaceId, actorId, null);
);
}
} }
await this.basePropertyRepo.updateProperty(dto.propertyId, { // --- Path 2 or 3: cell rewrite needed -------------------------------
...(dto.name !== undefined && { name: dto.name }), const conversionPayload: IBaseTypeConversionJob = {
...(dto.type !== undefined && { type: dto.type }), baseId: dto.baseId,
typeOptions: validatedTypeOptions, propertyId: dto.propertyId,
}); workspaceId,
fromType: oldType,
toType: newType,
fromTypeOptions: oldTypeOptions,
toTypeOptions: validatedTypeOptions,
clearMode: involvesSystem,
actorId,
};
const updatedProperty = await this.basePropertyRepo.findById( // Count only the rows whose cell jsonb has this property's key — the
// set the worker will actually rewrite. A 100k-row base with the
// property set on 12 rows is trivial to convert inline; the previous
// count-all-live-rows check was routing those to the worker.
const rowsToConvert = await this.countRowsToConvert(
dto.baseId,
workspaceId,
dto.propertyId, dto.propertyId,
); );
tick(`after-countRowsToConvert(${rowsToConvert})`);
return { property: updatedProperty, conversionSummary }; if (rowsToConvert <= INLINE_CONVERSION_ROW_LIMIT) {
tick('taking-inline-path');
// Path 2: inline rewrite. Apply the name-only fields (if any), run
// the rewrite, then flip the type — all in one transaction so
// readers only ever see a consistent snapshot.
const schemaVersion = await executeTx(this.db, async (trx) => {
if (dto.name !== undefined) {
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{ name: dto.name },
trx,
);
}
await processBaseTypeConversion(
this.db,
this.baseRowRepo,
conversionPayload,
{ trx },
);
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{
type: newType,
typeOptions: validatedTypeOptions,
},
trx,
);
await this.basePropertyRepo.bumpSchemaVersion(dto.propertyId, trx);
return this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
});
tick('inline-tx-done');
const bumpEvent: BaseSchemaBumpedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: null,
schemaVersion,
};
this.eventEmitter.emit(EventName.BASE_SCHEMA_BUMPED, bumpEvent);
return this.loadAndEmit(dto, workspaceId, actorId, null);
}
// Path 3: stage the new type on pending_*, keep live `type` alone,
// and hand off to the worker. A best-effort revert clears the staging
// fields if the enqueue itself fails.
tick('taking-worker-path');
await executeTx(this.db, async (trx) => {
await this.basePropertyRepo.updateProperty(
dto.propertyId,
{
...(dto.name !== undefined && { name: dto.name }),
pendingType: newType,
pendingTypeOptions: validatedTypeOptions,
},
trx,
);
});
tick('after-set-pending');
let jobId: string | null = null;
try {
const job = await this.baseQueue.add(
QueueJob.BASE_TYPE_CONVERSION,
conversionPayload,
{ attempts: 1 },
);
jobId = String(job.id);
tick(`after-queue.add(${jobId})`);
} catch (err) {
this.logger.error(
`Enqueue of type-conversion failed for property ${dto.propertyId}; clearing pending state`,
err as Error,
);
try {
await this.basePropertyRepo.clearPendingTypeChange(dto.propertyId);
} catch (revertErr) {
this.logger.error(
`Failed to clear pending state on ${dto.propertyId}. Manual intervention required.`,
revertErr as Error,
);
}
throw new ServiceUnavailableException(
'Type conversion queue unavailable. Property update rolled back.',
);
}
const out = await this.loadAndEmit(dto, workspaceId, actorId, jobId);
tick('return');
return out;
} }
async delete(dto: DeletePropertyDto) { /*
* Reloads the property and emits `base.property.updated`. The emission
* has to happen after the outer transaction commits so socket consumers
* never race ahead of visibility.
*/
private async loadAndEmit(
dto: UpdatePropertyDto,
workspaceId: string,
actorId: string | undefined,
jobId: string | null,
) {
const updated = await this.basePropertyRepo.findById(dto.propertyId);
if (updated) {
const event: BasePropertyUpdatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: dto.requestId ?? null,
property: updated,
schemaVersion: updated.schemaVersion,
};
this.eventEmitter.emit(EventName.BASE_PROPERTY_UPDATED, event);
}
return { property: updated, jobId };
}
private async countRowsToConvert(
baseId: string,
workspaceId: string,
propertyId: string,
): Promise<number> {
const row = await this.db
.selectFrom('baseRows')
.select(sql<string>`count(*)`.as('n'))
.where('baseId', '=', baseId)
.where('workspaceId', '=', workspaceId)
.where('deletedAt', 'is', null)
.where(sql<SqlBool>`cells ? ${propertyId}`)
.executeTakeFirst();
return Number(row?.n ?? 0);
}
async delete(
dto: DeletePropertyDto,
workspaceId: string,
actorId?: string,
) {
const property = await this.basePropertyRepo.findById(dto.propertyId); const property = await this.basePropertyRepo.findById(dto.propertyId);
if (!property) { if (!property) {
throw new NotFoundException('Property not found'); throw new NotFoundException('Property not found');
@@ -129,13 +385,56 @@ export class BasePropertyService {
throw new BadRequestException('Cannot delete the primary property'); throw new BadRequestException('Cannot delete the primary property');
} }
// Soft-delete so queries filter the property out immediately, then
// enqueue cell-gc to scrub cell keys and hard-delete. If the enqueue
// fails, revert the soft-delete so the property isn't orphaned.
await executeTx(this.db, async (trx) => { await executeTx(this.db, async (trx) => {
await this.basePropertyRepo.deleteProperty(dto.propertyId, trx); await this.basePropertyRepo.softDelete(dto.propertyId, trx);
await this.baseRowRepo.removeCellKey(dto.baseId, dto.propertyId, trx); await this.baseRepo.bumpSchemaVersion(dto.baseId, trx);
}); });
const payload: IBaseCellGcJob = {
baseId: dto.baseId,
propertyId: dto.propertyId,
workspaceId,
};
try {
await this.baseQueue.add(QueueJob.BASE_CELL_GC, payload, { attempts: 2 });
} catch (err) {
this.logger.error(
`Enqueue of cell-gc failed for property ${dto.propertyId}; reverting soft-delete`,
err as Error,
);
try {
await this.basePropertyRepo.updateProperty(dto.propertyId, {
deletedAt: null,
});
} catch (revertErr) {
this.logger.error(
`Revert failed for property ${dto.propertyId}. Manual intervention required.`,
revertErr as Error,
);
}
throw new ServiceUnavailableException(
'Cell-GC queue unavailable. Property delete rolled back.',
);
}
const event: BasePropertyDeletedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: actorId ?? null,
requestId: dto.requestId ?? null,
propertyId: dto.propertyId,
};
this.eventEmitter.emit(EventName.BASE_PROPERTY_DELETED, event);
} }
async reorder(dto: ReorderPropertyDto) { async reorder(
dto: ReorderPropertyDto,
workspaceId: string,
actorId?: string,
) {
const property = await this.basePropertyRepo.findById(dto.propertyId); const property = await this.basePropertyRepo.findById(dto.propertyId);
if (!property) { if (!property) {
throw new NotFoundException('Property not found'); throw new NotFoundException('Property not found');
@@ -148,69 +447,29 @@ export class BasePropertyService {
await this.basePropertyRepo.updateProperty(dto.propertyId, { await this.basePropertyRepo.updateProperty(dto.propertyId, {
position: dto.position, position: dto.position,
}); });
}
private async clearCellValues( const event: BasePropertyReorderedEvent = {
baseId: string, baseId: dto.baseId,
propertyId: string, workspaceId,
): Promise<{ converted: number; cleared: number; total: number }> { actorId: actorId ?? null,
const rows = await this.baseRowRepo.findAllByBaseId(baseId); requestId: dto.requestId ?? null,
const updates: Array<{ id: string; cells: Record<string, unknown> }> = []; propertyId: dto.propertyId,
position: dto.position,
for (const row of rows) { };
const cells = row.cells as Record<string, unknown>; this.eventEmitter.emit(EventName.BASE_PROPERTY_REORDERED, event);
if (propertyId in cells) { }
updates.push({ id: row.id, cells: { [propertyId]: null } }); }
}
} function parseTypeOptionsOrThrow(
type: BasePropertyTypeValue,
if (updates.length > 0) { typeOptions: unknown,
await executeTx(this.db, async (trx) => { ): unknown {
await this.baseRowRepo.batchUpdateCells(updates, trx); try {
}); return parseTypeOptions(type, typeOptions);
} } catch (err) {
throw new BadRequestException({
return { converted: 0, cleared: updates.length, total: updates.length }; message: 'Invalid typeOptions',
} issues: (err as any)?.issues ?? [],
});
private async convertCellValues(
baseId: string,
propertyId: string,
fromType: BasePropertyTypeValue,
toType: BasePropertyTypeValue,
): Promise<{ converted: number; cleared: number; total: number }> {
const rows = await this.baseRowRepo.findAllByBaseId(baseId);
let converted = 0;
let cleared = 0;
let total = 0;
const updates: Array<{ id: string; cells: Record<string, unknown> }> = [];
for (const row of rows) {
const cells = row.cells as Record<string, unknown>;
if (!(propertyId in cells)) {
continue;
}
total++;
const currentValue = cells[propertyId];
const result = attemptCellConversion(fromType, toType, currentValue);
if (result.converted) {
converted++;
updates.push({ id: row.id, cells: { [propertyId]: result.value } });
} else {
cleared++;
updates.push({ id: row.id, cells: { [propertyId]: null } });
}
}
if (updates.length > 0) {
await executeTx(this.db, async (trx) => {
await this.baseRowRepo.batchUpdateCells(updates, trx);
});
}
return { converted, cleared, total };
} }
} }
@@ -4,6 +4,7 @@ import {
NotFoundException, NotFoundException,
} from '@nestjs/common'; } from '@nestjs/common';
import { InjectKysely } from 'nestjs-kysely'; import { InjectKysely } from 'nestjs-kysely';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { KyselyDB } from '@docmost/db/types/kysely.types'; import { KyselyDB } from '@docmost/db/types/kysely.types';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo'; import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo'; import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
@@ -11,6 +12,7 @@ import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
import { CreateRowDto } from '../dto/create-row.dto'; import { CreateRowDto } from '../dto/create-row.dto';
import { import {
UpdateRowDto, UpdateRowDto,
DeleteRowDto,
ListRowsDto, ListRowsDto,
ReorderRowDto, ReorderRowDto,
} from '../dto/update-row.dto'; } from '../dto/update-row.dto';
@@ -22,6 +24,21 @@ import {
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered'; import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options'; import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import { BaseProperty } from '@docmost/db/types/entity.types'; import { BaseProperty } from '@docmost/db/types/entity.types';
import {
FilterNode,
PropertySchema,
SearchSpec,
filterGroupSchema,
searchSchema,
validateFilterTree,
} from '../engine';
import { EventName } from '../../../common/events/event.contants';
import {
BaseRowCreatedEvent,
BaseRowDeletedEvent,
BaseRowReorderedEvent,
BaseRowUpdatedEvent,
} from '../events/base-events';
@Injectable() @Injectable()
export class BaseRowService { export class BaseRowService {
@@ -30,19 +47,24 @@ export class BaseRowService {
private readonly baseRowRepo: BaseRowRepo, private readonly baseRowRepo: BaseRowRepo,
private readonly basePropertyRepo: BasePropertyRepo, private readonly basePropertyRepo: BasePropertyRepo,
private readonly baseViewRepo: BaseViewRepo, private readonly baseViewRepo: BaseViewRepo,
private readonly eventEmitter: EventEmitter2,
) {} ) {}
async create(userId: string, workspaceId: string, dto: CreateRowDto) { async create(userId: string, workspaceId: string, dto: CreateRowDto) {
let position: string; let position: string;
if (dto.afterRowId) { if (dto.afterRowId) {
const afterRow = await this.baseRowRepo.findById(dto.afterRowId); const afterRow = await this.baseRowRepo.findById(dto.afterRowId, {
workspaceId,
});
if (!afterRow || afterRow.baseId !== dto.baseId) { if (!afterRow || afterRow.baseId !== dto.baseId) {
throw new BadRequestException('Invalid afterRowId'); throw new BadRequestException('Invalid afterRowId');
} }
position = generateJitteredKeyBetween(afterRow.position, null); position = generateJitteredKeyBetween(afterRow.position, null);
} else { } else {
const lastPosition = await this.baseRowRepo.getLastPosition(dto.baseId); const lastPosition = await this.baseRowRepo.getLastPosition(dto.baseId, {
workspaceId,
});
position = generateJitteredKeyBetween(lastPosition, null); position = generateJitteredKeyBetween(lastPosition, null);
} }
@@ -52,68 +74,117 @@ export class BaseRowService {
validatedCells = this.validateCells(dto.cells, properties); validatedCells = this.validateCells(dto.cells, properties);
} }
return this.baseRowRepo.insertRow({ const created = await this.baseRowRepo.insertRow({
baseId: dto.baseId, baseId: dto.baseId,
cells: validatedCells as any, cells: validatedCells as any,
position, position,
creatorId: userId, creatorId: userId,
workspaceId, workspaceId,
}); });
const event: BaseRowCreatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId,
requestId: dto.requestId ?? null,
row: created,
};
this.eventEmitter.emit(EventName.BASE_ROW_CREATED, event);
return created;
} }
async getRowInfo(rowId: string, baseId: string) { async getRowInfo(rowId: string, baseId: string, workspaceId: string) {
const row = await this.baseRowRepo.findById(rowId); const row = await this.baseRowRepo.findById(rowId, { workspaceId });
if (!row || row.baseId !== baseId) { if (!row || row.baseId !== baseId) {
throw new NotFoundException('Row not found'); throw new NotFoundException('Row not found');
} }
return row; return row;
} }
async update(dto: UpdateRowDto, userId?: string) { async update(dto: UpdateRowDto, workspaceId: string, userId?: string) {
const row = await this.baseRowRepo.findById(dto.rowId); const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
const validatedCells = this.validateCells(dto.cells, properties);
const updated = await this.baseRowRepo.updateCells(
dto.rowId,
validatedCells,
{
baseId: dto.baseId,
workspaceId,
actorId: userId,
},
);
if (!updated) {
throw new NotFoundException('Row not found');
}
const event: BaseRowUpdatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: dto.requestId ?? null,
rowId: dto.rowId,
patch: dto.cells,
updatedCells: validatedCells,
};
this.eventEmitter.emit(EventName.BASE_ROW_UPDATED, event);
return updated;
}
async delete(dto: DeleteRowDto, workspaceId: string, userId?: string) {
const row = await this.baseRowRepo.findById(dto.rowId, { workspaceId });
if (!row || row.baseId !== dto.baseId) { if (!row || row.baseId !== dto.baseId) {
throw new NotFoundException('Row not found'); throw new NotFoundException('Row not found');
} }
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId); await this.baseRowRepo.softDelete(dto.rowId, {
const validatedCells = this.validateCells(dto.cells, properties); baseId: dto.baseId,
workspaceId,
});
await this.baseRowRepo.updateCells(dto.rowId, validatedCells, userId); const event: BaseRowDeletedEvent = {
baseId: dto.baseId,
return this.baseRowRepo.findById(dto.rowId); workspaceId,
actorId: userId ?? null,
requestId: dto.requestId ?? null,
rowId: dto.rowId,
};
this.eventEmitter.emit(EventName.BASE_ROW_DELETED, event);
} }
async delete(rowId: string, baseId: string) { async list(
const row = await this.baseRowRepo.findById(rowId); dto: ListRowsDto,
if (!row || row.baseId !== baseId) { pagination: PaginationOptions,
throw new NotFoundException('Row not found'); workspaceId: string,
} ) {
await this.baseRowRepo.softDelete(rowId);
}
async list(dto: ListRowsDto, pagination: PaginationOptions) {
const hasFilters = dto.filters && dto.filters.length > 0;
const hasSorts = dto.sorts && dto.sorts.length > 0;
if (!hasFilters && !hasSorts) {
return this.baseRowRepo.findByBaseId(dto.baseId, pagination);
}
const properties = await this.basePropertyRepo.findByBaseId(dto.baseId); const properties = await this.basePropertyRepo.findByBaseId(dto.baseId);
const propertyTypeMap = new Map(properties.map((p) => [p.id, p.type])); const schema: PropertySchema = new Map(
properties.map((p) => [p.id, p]),
return this.baseRowRepo.findByBaseIdFiltered(
dto.baseId,
dto.filters ?? [],
dto.sorts ?? [],
propertyTypeMap,
pagination,
); );
const filter = this.normaliseFilter(dto);
const search = this.normaliseSearch(dto.search);
const sorts = dto.sorts?.map((s) => ({
propertyId: s.propertyId,
direction: s.direction,
}));
return this.baseRowRepo.list({
baseId: dto.baseId,
workspaceId,
filter,
sorts,
search,
schema,
pagination,
});
} }
async reorder(dto: ReorderRowDto) { async reorder(dto: ReorderRowDto, workspaceId: string, userId?: string) {
const row = await this.baseRowRepo.findById(dto.rowId); const row = await this.baseRowRepo.findById(dto.rowId, { workspaceId });
if (!row || row.baseId !== dto.baseId) { if (!row || row.baseId !== dto.baseId) {
throw new NotFoundException('Row not found'); throw new NotFoundException('Row not found');
} }
@@ -124,7 +195,52 @@ export class BaseRowService {
throw new BadRequestException('Invalid position value'); throw new BadRequestException('Invalid position value');
} }
await this.baseRowRepo.updatePosition(dto.rowId, dto.position); await this.baseRowRepo.updatePosition(dto.rowId, dto.position, {
baseId: dto.baseId,
workspaceId,
});
const event: BaseRowReorderedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: dto.requestId ?? null,
rowId: dto.rowId,
position: dto.position,
};
this.eventEmitter.emit(EventName.BASE_ROW_REORDERED, event);
}
// --- private helpers ------------------------------------------------
private normaliseFilter(dto: ListRowsDto): FilterNode | undefined {
if (!dto.filter) return undefined;
const parsed = filterGroupSchema.safeParse(dto.filter);
if (!parsed.success) {
throw new BadRequestException({
message: 'Invalid filter tree',
issues: parsed.error.issues,
});
}
try {
validateFilterTree(parsed.data);
} catch (err) {
throw new BadRequestException((err as Error).message);
}
return parsed.data;
}
private normaliseSearch(raw: unknown): SearchSpec | undefined {
if (raw == null) return undefined;
const parsed = searchSchema.safeParse(raw);
if (!parsed.success) {
throw new BadRequestException({
message: 'Invalid search spec',
issues: parsed.error.issues,
});
}
return parsed.data;
} }
private validateCells( private validateCells(
@@ -3,15 +3,25 @@ import {
Injectable, Injectable,
NotFoundException, NotFoundException,
} from '@nestjs/common'; } from '@nestjs/common';
import { EventEmitter2 } from '@nestjs/event-emitter';
import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo'; import { BaseViewRepo } from '@docmost/db/repos/base/base-view.repo';
import { CreateViewDto } from '../dto/create-view.dto'; import { CreateViewDto } from '../dto/create-view.dto';
import { UpdateViewDto, DeleteViewDto } from '../dto/update-view.dto'; import { UpdateViewDto, DeleteViewDto } from '../dto/update-view.dto';
import { viewConfigSchema } from '../base.schemas'; import { viewConfigSchema } from '../base.schemas';
import { generateJitteredKeyBetween } from 'fractional-indexing-jittered'; import { generateJitteredKeyBetween } from 'fractional-indexing-jittered';
import { EventName } from '../../../common/events/event.contants';
import {
BaseViewCreatedEvent,
BaseViewDeletedEvent,
BaseViewUpdatedEvent,
} from '../events/base-events';
@Injectable() @Injectable()
export class BaseViewService { export class BaseViewService {
constructor(private readonly baseViewRepo: BaseViewRepo) {} constructor(
private readonly baseViewRepo: BaseViewRepo,
private readonly eventEmitter: EventEmitter2,
) {}
async create(userId: string, workspaceId: string, dto: CreateViewDto) { async create(userId: string, workspaceId: string, dto: CreateViewDto) {
let validatedConfig = {}; let validatedConfig = {};
@@ -26,10 +36,12 @@ export class BaseViewService {
validatedConfig = result.data; validatedConfig = result.data;
} }
const lastPosition = await this.baseViewRepo.getLastPosition(dto.baseId); const lastPosition = await this.baseViewRepo.getLastPosition(dto.baseId, {
workspaceId,
});
const position = generateJitteredKeyBetween(lastPosition, null); const position = generateJitteredKeyBetween(lastPosition, null);
return this.baseViewRepo.insertView({ const created = await this.baseViewRepo.insertView({
baseId: dto.baseId, baseId: dto.baseId,
name: dto.name, name: dto.name,
type: dto.type ?? 'table', type: dto.type ?? 'table',
@@ -38,10 +50,21 @@ export class BaseViewService {
workspaceId, workspaceId,
creatorId: userId, creatorId: userId,
}); });
const event: BaseViewCreatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId,
requestId: null,
view: created,
};
this.eventEmitter.emit(EventName.BASE_VIEW_CREATED, event);
return created;
} }
async update(dto: UpdateViewDto) { async update(dto: UpdateViewDto, workspaceId: string, userId?: string) {
const view = await this.baseViewRepo.findById(dto.viewId); const view = await this.baseViewRepo.findById(dto.viewId, { workspaceId });
if (!view) { if (!view) {
throw new NotFoundException('View not found'); throw new NotFoundException('View not found');
} }
@@ -62,17 +85,36 @@ export class BaseViewService {
validatedConfig = result.data; validatedConfig = result.data;
} }
await this.baseViewRepo.updateView(dto.viewId, { await this.baseViewRepo.updateView(
...(dto.name !== undefined && { name: dto.name }), dto.viewId,
...(dto.type !== undefined && { type: dto.type }), {
...(validatedConfig !== undefined && { config: validatedConfig as any }), ...(dto.name !== undefined && { name: dto.name }),
...(dto.type !== undefined && { type: dto.type }),
...(validatedConfig !== undefined && { config: validatedConfig as any }),
},
{ workspaceId },
);
const updated = await this.baseViewRepo.findById(dto.viewId, {
workspaceId,
}); });
return this.baseViewRepo.findById(dto.viewId); if (updated) {
const event: BaseViewUpdatedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: null,
view: updated,
};
this.eventEmitter.emit(EventName.BASE_VIEW_UPDATED, event);
}
return updated;
} }
async delete(dto: DeleteViewDto) { async delete(dto: DeleteViewDto, workspaceId: string, userId?: string) {
const view = await this.baseViewRepo.findById(dto.viewId); const view = await this.baseViewRepo.findById(dto.viewId, { workspaceId });
if (!view) { if (!view) {
throw new NotFoundException('View not found'); throw new NotFoundException('View not found');
} }
@@ -81,15 +123,26 @@ export class BaseViewService {
throw new BadRequestException('View does not belong to this base'); throw new BadRequestException('View does not belong to this base');
} }
const viewCount = await this.baseViewRepo.countByBaseId(dto.baseId); const viewCount = await this.baseViewRepo.countByBaseId(dto.baseId, {
workspaceId,
});
if (viewCount <= 1) { if (viewCount <= 1) {
throw new BadRequestException('Cannot delete the last view'); throw new BadRequestException('Cannot delete the last view');
} }
await this.baseViewRepo.deleteView(dto.viewId); await this.baseViewRepo.deleteView(dto.viewId, { workspaceId });
const event: BaseViewDeletedEvent = {
baseId: dto.baseId,
workspaceId,
actorId: userId ?? null,
requestId: null,
viewId: dto.viewId,
};
this.eventEmitter.emit(EventName.BASE_VIEW_DELETED, event);
} }
async listByBaseId(baseId: string) { async listByBaseId(baseId: string, workspaceId: string) {
return this.baseViewRepo.findByBaseId(baseId); return this.baseViewRepo.findByBaseId(baseId, { workspaceId });
} }
} }
@@ -61,7 +61,7 @@ export class BaseService {
workspaceId, workspaceId,
creatorId: userId, creatorId: userId,
}, },
trx, { trx },
); );
return this.baseRepo.findById(base.id, { return this.baseRepo.findById(base.id, {
@@ -0,0 +1,35 @@
import { Logger } from '@nestjs/common';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import { BasePropertyRepo } from '@docmost/db/repos/base/base-property.repo';
import { KyselyDB } from '@docmost/db/types/kysely.types';
import { executeTx } from '@docmost/db/utils';
import { IBaseCellGcJob } from '../../../integrations/queue/constants/queue.interface';
const logger = new Logger('BaseCellGcTask');
/*
 * Garbage-collects a soft-deleted property: strips the property's key out
 * of every row's `cells` jsonb, then hard-deletes the property record.
 * Both steps share a single transaction — a failure between them would
 * otherwise leave rows scrubbed while the property row lingers, requiring
 * manual cleanup. `removeCellKey` is a single
 * `UPDATE ... SET cells = cells - $propId` statement.
 */
export async function processBaseCellGc(
  db: KyselyDB,
  baseRowRepo: BaseRowRepo,
  basePropertyRepo: BasePropertyRepo,
  data: IBaseCellGcJob,
): Promise<void> {
  const { baseId, propertyId, workspaceId } = data;

  await executeTx(db, (trx) =>
    // Scrub the key from row cells first, then drop the property record;
    // the chain runs entirely inside the transaction `executeTx` opened.
    baseRowRepo
      .removeCellKey(baseId, propertyId, { workspaceId, trx })
      .then(() => basePropertyRepo.hardDelete(propertyId, trx)),
  );

  logger.log(`cell-gc complete base=${baseId} prop=${propertyId}`);
}
@@ -0,0 +1,203 @@
import { Logger } from '@nestjs/common';
import { KyselyDB, KyselyTransaction } from '@docmost/db/types/kysely.types';
import { dbOrTx } from '@docmost/db/utils';
import { BaseRowRepo } from '@docmost/db/repos/base/base-row.repo';
import {
BasePropertyType,
BasePropertyTypeValue,
CellConversionContext,
attemptCellConversion,
} from '../base.schemas';
import { IBaseTypeConversionJob } from '../../../integrations/queue/constants/queue.interface';
const logger = new Logger('BaseTypeConversionTask');
const CHUNK_SIZE = 1000;
/*
 * Handles the cell-rewrite side of a property type change on a base.
 * Runs per-chunk batched UPDATEs so Node RAM stays flat regardless of row
 * count. When the source type stores IDs (select / multiSelect / person /
 * file), it resolves to display values before writing — fixing the
 * `String(optionId)` bug that the old synchronous path produced.
 *
 * The `trx` option lets callers run the whole rewrite inside an outer
 * transaction. That matters for the inline path in `BasePropertyService`,
 * where the cell rewrite + `type` swap + `schema_version` bump must land
 * atomically so readers never observe cells written for a type that hasn't
 * flipped yet.
 *
 * @param db      database handle; also used for lookup queries in buildCtx
 * @param data    job payload: base/property IDs, from/to types, clearMode
 * @param opts.progress  invoked after each chunk with cumulative processed count
 * @param opts.trx       optional outer transaction to run everything inside
 * @returns counts of rows converted, cleared, and total rows touched
 */
export async function processBaseTypeConversion(
  db: KyselyDB,
  baseRowRepo: BaseRowRepo,
  data: IBaseTypeConversionJob,
  opts?: {
    progress?: (processed: number) => Promise<void> | void;
    trx?: KyselyTransaction;
  },
): Promise<{ converted: number; cleared: number; total: number }> {
  const {
    baseId,
    propertyId,
    workspaceId,
    fromType,
    toType,
    fromTypeOptions,
    clearMode,
    actorId,
  } = data;
  const progress = opts?.progress;
  const trx = opts?.trx;
  const queryDb = dbOrTx(db, trx);

  let total = 0;
  let converted = 0;
  let cleared = 0;

  // Only rows whose cell jsonb actually has this property key need
  // rewriting — everything else is already consistent with the new type
  // (empty value → empty value). Skips the full-table scan on bases
  // where the property was only ever set on a few rows.
  for await (const chunk of baseRowRepo.streamByBaseId(baseId, {
    workspaceId,
    chunkSize: CHUNK_SIZE,
    trx,
    withCellKey: propertyId,
  })) {
    // Resolution context (choice names, user names, attachment names) is
    // built per chunk so lookups stay bounded by CHUNK_SIZE.
    const ctx = await buildCtx(
      queryDb,
      chunk,
      propertyId,
      fromType,
      fromTypeOptions,
    );

    const updates: Array<{ id: string; patch: Record<string, unknown> }> = [];

    for (const row of chunk) {
      const cells = (row.cells ?? {}) as Record<string, unknown>;
      // Defensive re-check even though the stream filters on withCellKey.
      if (!(propertyId in cells)) continue;
      total++;

      if (clearMode) {
        // Caller chose to wipe values instead of converting them.
        // A null patch value deletes the key (see jsonb_set_many).
        updates.push({ id: row.id, patch: { [propertyId]: null } });
        cleared++;
        continue;
      }

      const result = attemptCellConversion(
        fromType as BasePropertyTypeValue,
        toType as BasePropertyTypeValue,
        cells[propertyId],
        ctx,
      );

      if (result.converted) {
        converted++;
        updates.push({
          id: row.id,
          patch: { [propertyId]: result.value ?? null },
        });
      } else {
        // Unconvertible value → clear it rather than keep stale data.
        cleared++;
        updates.push({ id: row.id, patch: { [propertyId]: null } });
      }
    }

    if (updates.length > 0) {
      await baseRowRepo.batchUpdateCells(updates, {
        baseId,
        workspaceId,
        actorId,
        trx,
      });
    }

    if (progress) await progress(total);
  }

  // FIX: the template previously read `${fromType}${toType}` (no separator),
  // which logged the two type names fused together, e.g. "textnumber".
  logger.log(
    `type-conversion ${fromType}->${toType} base=${baseId} prop=${propertyId} total=${total} converted=${converted} cleared=${cleared}`,
  );

  return { converted, cleared, total };
}
/*
 * Builds the resolution context for one chunk of rows. select/multiSelect
 * choice maps already travel in the job payload (`fromTypeOptions`); for
 * person and file sources we batch-resolve only the IDs present in this
 * chunk — one query per chunk, never per row.
 */
async function buildCtx(
  db: KyselyDB | KyselyTransaction,
  chunk: Array<{ cells: unknown }>,
  propertyId: string,
  fromType: string,
  fromTypeOptions: unknown,
): Promise<CellConversionContext> {
  const context: CellConversionContext = { fromTypeOptions };

  if (fromType === BasePropertyType.PERSON) {
    const userIds = Array.from(collectIds(chunk, propertyId));
    if (userIds.length > 0) {
      const users = await db
        .selectFrom('users')
        .select(['id', 'name', 'email'])
        .where('id', 'in', userIds)
        .execute();
      // Prefer display name, fall back to email, then empty string.
      context.userNames = new Map(
        users.map((user) => [user.id, user.name || user.email || '']),
      );
    }
    return context;
  }

  if (fromType === BasePropertyType.FILE) {
    const fileIds = Array.from(collectFileIds(chunk, propertyId));
    if (fileIds.length > 0) {
      const attachments = await db
        .selectFrom('attachments')
        .select(['id', 'fileName'])
        .where('id', 'in', fileIds)
        .execute();
      context.attachmentNames = new Map(
        attachments.map((attachment) => [attachment.id, attachment.fileName]),
      );
    }
    return context;
  }

  // All other source types resolve purely from fromTypeOptions.
  return context;
}
/**
 * Collects every non-empty string ID stored under `propertyId` across a
 * chunk of rows. A cell may hold either a single string or an array of
 * strings; anything else (null, numbers, nested objects) is ignored.
 */
function collectIds(
  chunk: Array<{ cells: unknown }>,
  propertyId: string,
): Set<string> {
  const ids = new Set<string>();
  for (const { cells } of chunk) {
    const value = (cells as Record<string, unknown> | null | undefined)?.[
      propertyId
    ];
    if (value == null) continue;
    // Normalise scalar cells to a one-element list so both shapes share
    // the same filtering loop.
    const candidates = Array.isArray(value) ? value : [value];
    for (const candidate of candidates) {
      if (typeof candidate === 'string' && candidate.length > 0) {
        ids.add(candidate);
      }
    }
  }
  return ids;
}
/**
 * Collects attachment IDs referenced by a file-type cell across a chunk of
 * rows. File cells are arrays whose entries are either plain ID strings or
 * objects carrying a string `id`; non-array cells and malformed entries
 * are skipped.
 */
function collectFileIds(
  chunk: Array<{ cells: unknown }>,
  propertyId: string,
): Set<string> {
  const ids = new Set<string>();
  for (const { cells } of chunk) {
    const value = (cells as Record<string, unknown> | null | undefined)?.[
      propertyId
    ];
    if (!Array.isArray(value)) continue;
    for (const entry of value) {
      if (typeof entry === 'string') {
        if (entry.length > 0) ids.add(entry);
      } else if (
        entry != null &&
        typeof entry === 'object' &&
        typeof (entry as { id?: unknown }).id === 'string'
      ) {
        ids.add((entry as { id: string }).id);
      }
    }
  }
  return ids;
}
@@ -0,0 +1,333 @@
import { type Kysely, sql } from 'kysely';
/**
 * Adds search columns, schema-on-read extractor functions, the row search
 * trigger, and the index set for base tables.
 *
 * NOTE(review): relies on `f_unaccent(...)` and the `pg_trgm` extension
 * (`gin_trgm_ops`) having been created by earlier migrations — confirm
 * both exist in every deployment. `CREATE OR REPLACE TRIGGER` requires
 * PostgreSQL 14+.
 */
export async function up(db: Kysely<any>): Promise<void> {
  // --- Columns -----------------------------------------------------------
  await sql`
    ALTER TABLE base_rows
    ADD COLUMN IF NOT EXISTS search_text text,
    ADD COLUMN IF NOT EXISTS search_tsv tsvector
  `.execute(db);

  await sql`
    ALTER TABLE base_properties
    ADD COLUMN IF NOT EXISTS schema_version integer NOT NULL DEFAULT 1,
    ADD COLUMN IF NOT EXISTS deleted_at timestamptz
  `.execute(db);

  await sql`
    ALTER TABLE bases
    ADD COLUMN IF NOT EXISTS schema_version integer NOT NULL DEFAULT 1
  `.execute(db);

  // --- Schema-on-read extractors ----------------------------------------
  // Coercion-safe: uncoercible values return NULL, never raise.
  // IMMUTABLE so the planner can inline them into expression indexes later.
  await sql`
    CREATE OR REPLACE FUNCTION base_cell_text(cells jsonb, prop uuid)
    RETURNS text
    LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
    AS $$ SELECT cells->>prop::text $$
  `.execute(db);

  await sql`
    CREATE OR REPLACE FUNCTION base_cell_numeric(cells jsonb, prop uuid)
    RETURNS numeric
    LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
    AS $$
    BEGIN
      RETURN (cells->>prop::text)::numeric;
    EXCEPTION WHEN others THEN
      RETURN NULL;
    END;
    $$
  `.execute(db);

  await sql`
    CREATE OR REPLACE FUNCTION base_cell_timestamptz(cells jsonb, prop uuid)
    RETURNS timestamptz
    LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
    AS $$
    BEGIN
      RETURN (cells->>prop::text)::timestamptz;
    EXCEPTION WHEN others THEN
      RETURN NULL;
    END;
    $$
  `.execute(db);

  await sql`
    CREATE OR REPLACE FUNCTION base_cell_bool(cells jsonb, prop uuid)
    RETURNS boolean
    LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE
    AS $$
    BEGIN
      RETURN (cells->>prop::text)::boolean;
    EXCEPTION WHEN others THEN
      RETURN NULL;
    END;
    $$
  `.execute(db);

  await sql`
    CREATE OR REPLACE FUNCTION base_cell_array(cells jsonb, prop uuid)
    RETURNS jsonb
    LANGUAGE sql IMMUTABLE STRICT PARALLEL SAFE
    AS $$ SELECT cells->prop::text $$
  `.execute(db);

  // --- Surgical JSONB patch (vs. whole-blob `||`) -----------------------
  // A JSON-null patch value deletes the key; every other value is written
  // with create-missing semantics.
  await sql`
    CREATE OR REPLACE FUNCTION jsonb_set_many(target jsonb, patches jsonb)
    RETURNS jsonb
    LANGUAGE plpgsql IMMUTABLE PARALLEL SAFE
    AS $$
    DECLARE
      k text;
      v jsonb;
      result jsonb := coalesce(target, '{}'::jsonb);
    BEGIN
      IF patches IS NULL OR jsonb_typeof(patches) <> 'object' THEN
        RETURN result;
      END IF;
      FOR k, v IN SELECT * FROM jsonb_each(patches) LOOP
        IF v = 'null'::jsonb THEN
          result := result - k;
        ELSE
          result := jsonb_set(result, ARRAY[k], v, true);
        END IF;
      END LOOP;
      RETURN result;
    END;
    $$
  `.execute(db);

  // --- Search text builder (select/multiSelect resolved to choice names) --
  // STABLE (not IMMUTABLE) because it reads base_properties.
  //
  // Transaction-scoped cache: the property list for a given base_id is
  // read once per transaction and cached via a local GUC. Bulk writes
  // (CSV import, batch cell update, trigger on N rows) share one lookup
  // instead of subquerying base_properties per row.
  //
  // FIX: declared PARALLEL UNSAFE (was PARALLEL SAFE). The function calls
  // set_config(), and setting parameters is forbidden inside parallel
  // workers — a parallel query invoking this would fail with
  // "cannot set parameters during a parallel operation".
  await sql`
    CREATE OR REPLACE FUNCTION build_base_row_search_text(
      _cells jsonb,
      _base_id uuid
    ) RETURNS text
    LANGUAGE plpgsql STABLE PARALLEL UNSAFE
    AS $$
    DECLARE
      _parts text[] := ARRAY[]::text[];
      _prop jsonb;
      _value text;
      _arr jsonb;
      _elem jsonb;
      _resolved text;
      _cache_key text;
      _cached text;
      _props jsonb;
    BEGIN
      IF _cells IS NULL OR _cells = '{}'::jsonb OR _base_id IS NULL THEN
        RETURN NULL;
      END IF;

      -- Transaction-scoped cache of the base's property list.
      _cache_key := 'bases.prop_cache_' || replace(_base_id::text, '-', '_');
      _cached := current_setting(_cache_key, true);
      IF _cached IS NULL OR _cached = '' THEN
        SELECT coalesce(
          jsonb_agg(jsonb_build_object(
            'id', id,
            'type', type,
            'type_options', type_options
          )),
          '[]'::jsonb
        )
        INTO _props
        FROM base_properties
        WHERE base_id = _base_id AND deleted_at IS NULL;
        PERFORM set_config(_cache_key, _props::text, true);
      ELSE
        _props := _cached::jsonb;
      END IF;

      FOR _prop IN SELECT * FROM jsonb_array_elements(_props)
      LOOP
        IF (_prop->>'type') IN ('text', 'url', 'email') THEN
          _value := _cells->>(_prop->>'id');
          IF _value IS NOT NULL AND _value <> '' THEN
            _parts := array_append(_parts, _value);
          END IF;
        ELSIF (_prop->>'type') IN ('select', 'status') THEN
          _value := _cells->>(_prop->>'id');
          IF _value IS NOT NULL AND _value <> '' THEN
            SELECT c->>'name' INTO _resolved
            FROM jsonb_array_elements(coalesce(_prop->'type_options'->'choices', '[]'::jsonb)) AS c
            WHERE c->>'id' = _value
            LIMIT 1;
            IF _resolved IS NOT NULL AND _resolved <> '' THEN
              _parts := array_append(_parts, _resolved);
            END IF;
          END IF;
        ELSIF (_prop->>'type') = 'multiSelect' THEN
          _arr := _cells->(_prop->>'id');
          IF jsonb_typeof(_arr) = 'array' THEN
            FOR _elem IN SELECT * FROM jsonb_array_elements(_arr)
            LOOP
              SELECT c->>'name' INTO _resolved
              FROM jsonb_array_elements(coalesce(_prop->'type_options'->'choices', '[]'::jsonb)) AS c
              WHERE c->>'id' = _elem#>>'{}'
              LIMIT 1;
              IF _resolved IS NOT NULL AND _resolved <> '' THEN
                _parts := array_append(_parts, _resolved);
              END IF;
            END LOOP;
          END IF;
        END IF;
      END LOOP;

      IF array_length(_parts, 1) IS NULL THEN
        RETURN NULL;
      END IF;
      RETURN f_unaccent(array_to_string(_parts, ' '));
    END;
    $$
  `.execute(db);

  // --- Row search trigger -----------------------------------------------
  // NOTE(review): to_tsvector('english', ...) hardcodes English stemming —
  // confirm that matches the product's search expectations.
  await sql`
    CREATE OR REPLACE FUNCTION base_rows_search_trigger() RETURNS trigger
    LANGUAGE plpgsql AS $$
    BEGIN
      NEW.search_text := build_base_row_search_text(NEW.cells, NEW.base_id);
      NEW.search_tsv := to_tsvector('english', coalesce(NEW.search_text, ''));
      RETURN NEW;
    END;
    $$
  `.execute(db);

  await sql`
    CREATE OR REPLACE TRIGGER base_rows_search_update
    BEFORE INSERT OR UPDATE ON base_rows
    FOR EACH ROW EXECUTE FUNCTION base_rows_search_trigger()
  `.execute(db);

  // --- Indexes ----------------------------------------------------------
  // Replace the default-opclass GIN created by the initial bases migration
  // with the smaller/faster jsonb_path_ops variant. No row-data backfill:
  // this branch is dev-only; the trigger populates search_text /
  // search_tsv on the next write to each row.
  await sql`DROP INDEX IF EXISTS idx_base_rows_cells_gin`.execute(db);
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_cells_gin_path_ops
    ON base_rows USING gin (cells jsonb_path_ops)
    WHERE deleted_at IS NULL
  `.execute(db);

  // Complementary default-opclass GIN so the `?` / `?|` / `?&` key-existence
  // operators are index-satisfiable — `jsonb_path_ops` above only covers
  // `@>`. Type-conversion and cell-GC paths filter `cells ? propertyId`;
  // without this the planner falls back to SEQ SCAN (~900ms on 100k rows).
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_cells_gin_keys
    ON base_rows USING gin (cells)
    WHERE deleted_at IS NULL
  `.execute(db);

  // Workhorse for paginated list: (base_id, position, id) on live rows.
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_base_alive
    ON base_rows (base_id, position COLLATE "C", id)
    WHERE deleted_at IS NULL
  `.execute(db);

  // Common "most recently edited" sort.
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_base_updated
    ON base_rows (base_id, updated_at DESC)
    WHERE deleted_at IS NULL
  `.execute(db);

  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_base_created
    ON base_rows (base_id, created_at DESC)
    WHERE deleted_at IS NULL
  `.execute(db);

  // Fulltext + trigram search indexes.
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_search_tsv
    ON base_rows USING gin (search_tsv)
    WHERE deleted_at IS NULL
  `.execute(db);

  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_search_trgm
    ON base_rows USING gin (search_text gin_trgm_ops)
    WHERE deleted_at IS NULL
  `.execute(db);

  // Tenant-scoped scans defense-in-depth.
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_workspace
    ON base_rows (workspace_id, base_id)
  `.execute(db);

  // Live properties per base (deleted_at partial).
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_properties_base_alive
    ON base_properties (base_id, position COLLATE "C", id)
    WHERE deleted_at IS NULL
  `.execute(db);
}
/**
 * Reverses the `up` migration. Order matters: indexes drop first, then the
 * trigger (before the trigger function it references), then the helper
 * functions, and finally the columns.
 */
export async function down(db: Kysely<any>): Promise<void> {
  // --- Drop new indexes -------------------------------------------------
  await sql`DROP INDEX IF EXISTS idx_base_properties_base_alive`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_workspace`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_search_trgm`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_search_tsv`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_base_created`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_base_updated`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_base_alive`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_cells_gin_keys`.execute(db);
  await sql`DROP INDEX IF EXISTS idx_base_rows_cells_gin_path_ops`.execute(db);

  // Restore the original GIN that the initial bases migration created.
  await sql`
    CREATE INDEX IF NOT EXISTS idx_base_rows_cells_gin
    ON base_rows USING gin (cells)
  `.execute(db);

  // --- Drop trigger, trigger fn, helpers --------------------------------
  // Trigger must go before base_rows_search_trigger(); the builder and
  // extractor functions have no remaining dependents after that.
  await sql`DROP TRIGGER IF EXISTS base_rows_search_update ON base_rows`.execute(db);
  await sql`DROP FUNCTION IF EXISTS base_rows_search_trigger()`.execute(db);
  await sql`DROP FUNCTION IF EXISTS build_base_row_search_text(jsonb, uuid)`.execute(db);
  await sql`DROP FUNCTION IF EXISTS jsonb_set_many(jsonb, jsonb)`.execute(db);
  await sql`DROP FUNCTION IF EXISTS base_cell_array(jsonb, uuid)`.execute(db);
  await sql`DROP FUNCTION IF EXISTS base_cell_bool(jsonb, uuid)`.execute(db);
  await sql`DROP FUNCTION IF EXISTS base_cell_timestamptz(jsonb, uuid)`.execute(db);
  await sql`DROP FUNCTION IF EXISTS base_cell_numeric(jsonb, uuid)`.execute(db);
  await sql`DROP FUNCTION IF EXISTS base_cell_text(jsonb, uuid)`.execute(db);

  // --- Drop columns -----------------------------------------------------
  await sql`ALTER TABLE bases DROP COLUMN IF EXISTS schema_version`.execute(db);
  await sql`
    ALTER TABLE base_properties
    DROP COLUMN IF EXISTS deleted_at,
    DROP COLUMN IF EXISTS schema_version
  `.execute(db);
  await sql`
    ALTER TABLE base_rows
    DROP COLUMN IF EXISTS search_tsv,
    DROP COLUMN IF EXISTS search_text
  `.execute(db);
}
@@ -0,0 +1,24 @@
import { type Kysely, sql } from 'kysely';
/*
* Adds `pending_type` / `pending_type_options` to `base_properties` so
* async type conversions can run without flipping the live type prematurely.
* The worker swaps them onto `type` / `type_options` in the same
* transaction that bumps schema_version, so clients never observe raw IDs
* under a post-conversion type.
*/
/**
 * Stages the not-yet-live type on dedicated columns; the conversion worker
 * later promotes them onto `type` / `type_options` atomically.
 */
export async function up(db: Kysely<any>): Promise<void> {
  const addPendingColumns = sql`
    ALTER TABLE base_properties
    ADD COLUMN IF NOT EXISTS pending_type varchar,
    ADD COLUMN IF NOT EXISTS pending_type_options jsonb
  `;
  await addPendingColumns.execute(db);
}
/** Mirror of up(): drops the staging columns in reverse declaration order. */
export async function down(db: Kysely<any>): Promise<void> {
  const dropPendingColumns = sql`
    ALTER TABLE base_properties
    DROP COLUMN IF EXISTS pending_type_options,
    DROP COLUMN IF EXISTS pending_type
  `;
  await dropPendingColumns.execute(db);
}
@@ -15,14 +15,15 @@ export class BasePropertyRepo {
async findById( async findById(
propertyId: string, propertyId: string,
opts?: { trx?: KyselyTransaction }, opts?: { trx?: KyselyTransaction; includeDeleted?: boolean },
): Promise<BaseProperty | undefined> { ): Promise<BaseProperty | undefined> {
const db = dbOrTx(this.db, opts?.trx); const db = dbOrTx(this.db, opts?.trx);
return db let qb = db
.selectFrom('baseProperties') .selectFrom('baseProperties')
.selectAll() .selectAll()
.where('id', '=', propertyId) .where('id', '=', propertyId);
.executeTakeFirst() as Promise<BaseProperty | undefined>; if (!opts?.includeDeleted) qb = qb.where('deletedAt', 'is', null);
return qb.executeTakeFirst() as Promise<BaseProperty | undefined>;
} }
async findByBaseId( async findByBaseId(
@@ -34,6 +35,7 @@ export class BasePropertyRepo {
.selectFrom('baseProperties') .selectFrom('baseProperties')
.selectAll() .selectAll()
.where('baseId', '=', baseId) .where('baseId', '=', baseId)
.where('deletedAt', 'is', null)
.orderBy('position', 'asc') .orderBy('position', 'asc')
.execute() as Promise<BaseProperty[]>; .execute() as Promise<BaseProperty[]>;
} }
@@ -78,7 +80,19 @@ export class BasePropertyRepo {
.execute(); .execute();
} }
async deleteProperty( async softDelete(
propertyId: string,
trx?: KyselyTransaction,
): Promise<void> {
const db = dbOrTx(this.db, trx);
await db
.updateTable('baseProperties')
.set({ deletedAt: new Date(), updatedAt: new Date() })
.where('id', '=', propertyId)
.execute();
}
async hardDelete(
propertyId: string, propertyId: string,
trx?: KyselyTransaction, trx?: KyselyTransaction,
): Promise<void> { ): Promise<void> {
@@ -88,4 +102,60 @@ export class BasePropertyRepo {
.where('id', '=', propertyId) .where('id', '=', propertyId)
.execute(); .execute();
} }
async bumpSchemaVersion(
propertyId: string,
trx?: KyselyTransaction,
): Promise<void> {
const db = dbOrTx(this.db, trx);
await db
.updateTable('baseProperties')
.set({
schemaVersion: sql`schema_version + 1`,
updatedAt: new Date(),
})
.where('id', '=', propertyId)
.execute();
}
/*
* Promotes `pending_type` / `pending_type_options` onto the live `type` /
* `type_options` columns and clears the pending pair. No-op if no
* conversion was pending. Caller is responsible for doing this inside the
* same transaction as the cell rewrite so readers never see a
* half-converted state.
*/
async commitPendingTypeChange(
propertyId: string,
trx?: KyselyTransaction,
): Promise<void> {
const db = dbOrTx(this.db, trx);
await db
.updateTable('baseProperties')
.set({
type: sql`coalesce(pending_type, type)`,
typeOptions: sql`coalesce(pending_type_options, type_options)`,
pendingType: null,
pendingTypeOptions: null,
updatedAt: new Date(),
})
.where('id', '=', propertyId)
.execute();
}
async clearPendingTypeChange(
propertyId: string,
trx?: KyselyTransaction,
): Promise<void> {
const db = dbOrTx(this.db, trx);
await db
.updateTable('baseProperties')
.set({
pendingType: null,
pendingTypeOptions: null,
updatedAt: new Date(),
})
.where('id', '=', propertyId)
.execute();
}
} }
@@ -7,21 +7,39 @@ import {
InsertableBaseRow, InsertableBaseRow,
} from '@docmost/db/types/entity.types'; } from '@docmost/db/types/entity.types';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options'; import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import { executeWithCursorPagination } from '@docmost/db/pagination/cursor-pagination'; import {
import { sql, SelectQueryBuilder, SqlBool } from 'kysely'; CursorPaginationResult,
import { DB } from '@docmost/db/types/db'; executeWithCursorPagination,
} from '@docmost/db/pagination/cursor-pagination';
import { sql, SqlBool } from 'kysely';
import {
FilterNode,
PropertySchema,
SearchSpec,
SortSpec,
runListQuery,
} from '../../../core/base/engine';
const SYSTEM_COLUMN_MAP: Record<string, string> = { type RepoOpts = { trx?: KyselyTransaction };
createdAt: 'createdAt', type WorkspaceOpts = { workspaceId: string } & RepoOpts;
lastEditedAt: 'updatedAt',
lastEditedBy: 'lastUpdatedById',
};
const ARRAY_TYPES = new Set(['multiSelect', 'person', 'file']); // Columns that make up the public `BaseRow` shape.
// `search_text` and `search_tsv` are internal fulltext-index columns
function escapeIlike(value: string): string { // maintained by a trigger — they must never leak into API responses or
return value.replace(/[%_\\]/g, '\\$&'); // socket payloads. Every SELECT/RETURNING path in this repo references
} // this constant.
const BASE_ROW_COLUMNS = [
'id',
'baseId',
'cells',
'position',
'creatorId',
'lastUpdatedById',
'workspaceId',
'createdAt',
'updatedAt',
'deletedAt',
] as const;
@Injectable() @Injectable()
export class BaseRowRepo { export class BaseRowRepo {
@@ -29,54 +47,82 @@ export class BaseRowRepo {
async findById( async findById(
rowId: string, rowId: string,
opts?: { trx?: KyselyTransaction }, opts: WorkspaceOpts,
): Promise<BaseRow | undefined> { ): Promise<BaseRow | undefined> {
const db = dbOrTx(this.db, opts?.trx); const db = dbOrTx(this.db, opts.trx);
return db return (await db
.selectFrom('baseRows') .selectFrom('baseRows')
.selectAll() .select(BASE_ROW_COLUMNS)
.where('id', '=', rowId) .where('id', '=', rowId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null) .where('deletedAt', 'is', null)
.executeTakeFirst() as Promise<BaseRow | undefined>; .executeTakeFirst()) as BaseRow | undefined;
} }
async findByBaseId( async list(opts: {
baseId: string, baseId: string;
pagination: PaginationOptions, workspaceId: string;
opts?: { trx?: KyselyTransaction }, filter?: FilterNode;
) { sorts?: SortSpec[];
const db = dbOrTx(this.db, opts?.trx); search?: SearchSpec;
schema: PropertySchema;
pagination: PaginationOptions;
trx?: KyselyTransaction;
}): Promise<CursorPaginationResult<BaseRow>> {
const db = dbOrTx(this.db, opts.trx);
const query = db const base = db
.selectFrom('baseRows') .selectFrom('baseRows')
.selectAll() .select(BASE_ROW_COLUMNS)
.where('baseId', '=', baseId) .where('baseId', '=', opts.baseId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null); .where('deletedAt', 'is', null);
return executeWithCursorPagination(query, { const hasFilterSortSearch =
perPage: pagination.limit, !!opts.filter || (opts.sorts && opts.sorts.length > 0) || !!opts.search;
cursor: pagination.cursor,
beforeCursor: pagination.beforeCursor, if (!hasFilterSortSearch) {
fields: [ // Fast path: keyset-paginated list ordered by (position COLLATE "C", id)
{ expression: 'position', direction: 'asc' }, // to match idx_base_rows_base_alive. Without the collation hint the
{ expression: 'id', direction: 'asc' }, // planner falls back to a Sort node on every page.
], return executeWithCursorPagination(base as any, {
parseCursor: (cursor) => ({ perPage: opts.pagination.limit,
position: cursor.position, cursor: opts.pagination.cursor,
id: cursor.id, beforeCursor: opts.pagination.beforeCursor,
}), fields: [
{
expression: sql`position COLLATE "C"`,
direction: 'asc',
key: 'position',
},
{ expression: 'id', direction: 'asc', key: 'id' },
],
parseCursor: (c) => ({
position: c.position,
id: c.id,
}),
} as any) as unknown as Promise<CursorPaginationResult<BaseRow>>;
}
return runListQuery(base as any, {
filter: opts.filter,
sorts: opts.sorts,
search: opts.search,
schema: opts.schema,
pagination: opts.pagination,
}); });
} }
async getLastPosition( async getLastPosition(
baseId: string, baseId: string,
trx?: KyselyTransaction, opts: WorkspaceOpts,
): Promise<string | null> { ): Promise<string | null> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
const result = await db const result = await db
.selectFrom('baseRows') .selectFrom('baseRows')
.select('position') .select('position')
.where('baseId', '=', baseId) .where('baseId', '=', baseId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null) .where('deletedAt', 'is', null)
.orderBy(sql`position COLLATE "C"`, sql`DESC`) .orderBy(sql`position COLLATE "C"`, sql`DESC`)
.limit(1) .limit(1)
@@ -86,425 +132,199 @@ export class BaseRowRepo {
async insertRow( async insertRow(
row: InsertableBaseRow, row: InsertableBaseRow,
trx?: KyselyTransaction, opts?: RepoOpts,
): Promise<BaseRow> { ): Promise<BaseRow> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts?.trx);
return db return (await db
.insertInto('baseRows') .insertInto('baseRows')
.values(row) .values(row)
.returningAll() .returning(BASE_ROW_COLUMNS)
.executeTakeFirstOrThrow() as Promise<BaseRow>; .executeTakeFirstOrThrow()) as BaseRow;
} }
/*
* Merges `patch` into the row's cells via `jsonb_set_many` and returns
* the updated row (public columns only — search_text/search_tsv are
* excluded from RETURNING). Single round-trip; replaces the old
* "updateCells + findById" two-query dance.
*/
async updateCells( async updateCells(
rowId: string, rowId: string,
cells: Record<string, unknown>, patch: Record<string, unknown>,
userId?: string, opts: {
trx?: KyselyTransaction, baseId: string;
): Promise<void> { workspaceId: string;
const db = dbOrTx(this.db, trx); actorId?: string;
await db trx?: KyselyTransaction;
},
): Promise<BaseRow | undefined> {
const db = dbOrTx(this.db, opts.trx);
// Cast through text because postgres.js auto-detects a JSON-shaped
// string as jsonb and re-encodes it, producing a jsonb *string* instead
// of an object — which `jsonb_set_many` then treats as a no-op.
const patchJson = JSON.stringify(patch);
return (await db
.updateTable('baseRows') .updateTable('baseRows')
.set({ .set({
cells: sql`cells || ${cells}`, cells: sql`jsonb_set_many(cells, ${patchJson}::text::jsonb)`,
updatedAt: new Date(), updatedAt: new Date(),
lastUpdatedById: userId ?? null, lastUpdatedById: opts.actorId ?? null,
}) })
.where('id', '=', rowId) .where('id', '=', rowId)
.where('baseId', '=', opts.baseId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null) .where('deletedAt', 'is', null)
.execute(); .returning(BASE_ROW_COLUMNS)
.executeTakeFirst()) as BaseRow | undefined;
} }
async updatePosition( async updatePosition(
rowId: string, rowId: string,
position: string, position: string,
trx?: KyselyTransaction, opts: {
baseId: string;
workspaceId: string;
trx?: KyselyTransaction;
},
): Promise<void> { ): Promise<void> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
await db await db
.updateTable('baseRows') .updateTable('baseRows')
.set({ position, updatedAt: new Date() }) .set({ position, updatedAt: new Date() })
.where('id', '=', rowId) .where('id', '=', rowId)
.where('baseId', '=', opts.baseId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null)
.execute(); .execute();
} }
async softDelete(rowId: string, trx?: KyselyTransaction): Promise<void> { async softDelete(
const db = dbOrTx(this.db, trx); rowId: string,
opts: {
baseId: string;
workspaceId: string;
trx?: KyselyTransaction;
},
): Promise<void> {
const db = dbOrTx(this.db, opts.trx);
await db await db
.updateTable('baseRows') .updateTable('baseRows')
.set({ deletedAt: new Date() }) .set({ deletedAt: new Date() })
.where('id', '=', rowId) .where('id', '=', rowId)
.where('baseId', '=', opts.baseId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null)
.execute(); .execute();
} }
async removeCellKey( async removeCellKey(
baseId: string, baseId: string,
propertyId: string, propertyId: string,
trx?: KyselyTransaction, opts: WorkspaceOpts,
): Promise<void> { ): Promise<void> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
await db await db
.updateTable('baseRows') .updateTable('baseRows')
.set({ .set({
cells: sql`cells - ${propertyId}`, cells: sql`cells - ${propertyId}::text`,
updatedAt: new Date(), updatedAt: new Date(),
}) })
.where('baseId', '=', baseId) .where('baseId', '=', baseId)
.where('workspaceId', '=', opts.workspaceId)
.execute(); .execute();
} }
async findAllByBaseId( /*
* Streams every live row of a base in deterministic order via keyset
* pagination so async jobs (type-conversion, cell-gc, export) can process
* large bases without loading the full set into memory.
*
* `withCellKey` restricts the scan to rows whose cell jsonb contains
* that top-level key. Type-conversion callers pass the property ID so
* we don't drag 100k empty rows through Node just to rewrite a dozen.
*/
async *streamByBaseId(
baseId: string, baseId: string,
trx?: KyselyTransaction, opts: {
): Promise<BaseRow[]> { workspaceId: string;
const db = dbOrTx(this.db, trx); chunkSize?: number;
return db trx?: KyselyTransaction;
.selectFrom('baseRows') withCellKey?: string;
.selectAll() },
.where('baseId', '=', baseId) ): AsyncGenerator<BaseRow[], void, void> {
.where('deletedAt', 'is', null) const chunkSize = opts.chunkSize ?? 1000;
.execute() as Promise<BaseRow[]>; const db = dbOrTx(this.db, opts.trx);
let afterPosition: string | null = null;
let afterId: string | null = null;
while (true) {
let qb = db
.selectFrom('baseRows')
.select(BASE_ROW_COLUMNS)
.where('baseId', '=', baseId)
.where('workspaceId', '=', opts.workspaceId)
.where('deletedAt', 'is', null)
.orderBy(sql`position COLLATE "C"`, 'asc')
.orderBy('id', 'asc')
.limit(chunkSize);
if (opts.withCellKey) {
qb = qb.where(sql<SqlBool>`cells ? ${opts.withCellKey}`);
}
if (afterPosition !== null && afterId !== null) {
qb = qb.where((eb) =>
eb.or([
eb(sql`position COLLATE "C"`, '>', afterPosition!),
eb.and([
eb(sql`position COLLATE "C"`, '=', afterPosition!),
eb('id', '>', afterId!),
]),
]),
);
}
const chunk = (await qb.execute()) as BaseRow[];
if (chunk.length === 0) return;
yield chunk;
if (chunk.length < chunkSize) return;
const last = chunk[chunk.length - 1];
afterPosition = last.position;
afterId = last.id;
}
} }
/*
* Real batch: one `UPDATE ... FROM (SELECT unnest($ids), unnest($patches))`
* per call. Callers chunk (typically 1000 per call) from inside a BullMQ
* job. `cells` is merged via `jsonb_set_many` so only touched subtrees
* rewrite.
*/
async batchUpdateCells( async batchUpdateCells(
updates: Array<{ id: string; cells: Record<string, unknown> }>, updates: Array<{ id: string; patch: Record<string, unknown> }>,
trx?: KyselyTransaction, opts: {
baseId: string;
workspaceId: string;
actorId?: string;
trx?: KyselyTransaction;
},
): Promise<void> { ): Promise<void> {
const db = dbOrTx(this.db, trx); if (updates.length === 0) return;
for (const update of updates) { const db = dbOrTx(this.db, opts.trx);
await db
.updateTable('baseRows')
.set({
cells: sql`cells || ${update.cells}`,
updatedAt: new Date(),
})
.where('id', '=', update.id)
.execute();
}
}
async findByBaseIdFiltered( const ids = updates.map((u) => u.id);
baseId: string, const patches = updates.map((u) => JSON.stringify(u.patch));
filters: Array<{ propertyId: string; operator: string; value?: unknown }>,
sorts: Array<{ propertyId: string; direction: string }>,
propertyTypeMap: Map<string, string>,
pagination: PaginationOptions,
opts?: { trx?: KyselyTransaction },
) {
const db = dbOrTx(this.db, opts?.trx);
let query = db await sql`
.selectFrom('baseRows') UPDATE base_rows AS r
.selectAll() SET cells = jsonb_set_many(r.cells, u.patch::jsonb),
.where('baseId', '=', baseId) updated_at = now(),
.where('deletedAt', 'is', null) as SelectQueryBuilder<DB, 'baseRows', any>; last_updated_by_id = coalesce(${opts.actorId ?? null}, r.last_updated_by_id)
FROM unnest(${ids}::uuid[], ${patches}::text[]) AS u(row_id, patch)
// Apply filters WHERE r.id = u.row_id
for (const filter of filters) { AND r.base_id = ${opts.baseId}
query = this.applyFilter(query, filter, propertyTypeMap); AND r.workspace_id = ${opts.workspaceId}
} AND r.deleted_at IS NULL
`.execute(db);
// Build cursor-compatible sort fields.
// COALESCE sort expressions so NULLs never reach the cursor encoder/comparator.
// ASC NULLS LAST → COALESCE(expr, <high sentinel>)
// DESC NULLS LAST → COALESCE(expr, <low sentinel>)
const sortMeta: Array<{
alias: string;
expression: ReturnType<typeof sql>;
direction: 'asc' | 'desc';
isNumeric: boolean;
}> = [];
for (let i = 0; i < sorts.length; i++) {
const sort = sorts[i];
const type = propertyTypeMap.get(sort.propertyId);
if (!type) continue;
const dir = (sort.direction === 'desc' ? 'desc' : 'asc') as 'asc' | 'desc';
const alias = `s${i}`;
let expression: ReturnType<typeof sql>;
let isNumeric = false;
const systemCol = SYSTEM_COLUMN_MAP[type];
if (systemCol) {
// System columns (createdAt, updatedAt) are NOT NULL — no COALESCE needed
expression = sql`"${sql.raw(systemCol)}"`;
} else if (type === 'number') {
isNumeric = true;
const sentinel = dir === 'asc' ? "'Infinity'::numeric" : "'-Infinity'::numeric";
expression = sql`COALESCE((cells->>'${sql.raw(sort.propertyId)}')::numeric, ${sql.raw(sentinel)})`;
} else {
// Text, date, select, etc.
const sentinel = dir === 'asc' ? 'chr(1114111)' : "''";
expression = sql`COALESCE(cells->>'${sql.raw(sort.propertyId)}', ${sql.raw(sentinel)})`;
}
sortMeta.push({ alias, expression, direction: dir, isNumeric });
query = query.select(expression.as(alias)) as any;
}
// Cursor pagination fields: sort aliases + position + id tiebreakers.
// executeWithCursorPagination applies ORDER BY and builds the keyset WHERE from these.
const fields = [
...sortMeta.map(({ alias, expression, direction }) => ({
expression,
direction,
key: alias,
})),
{ expression: 'position' as any, direction: 'asc' as const, key: 'position' },
{ expression: 'id' as any, direction: 'asc' as const, key: 'id' },
];
return executeWithCursorPagination(query as any, {
perPage: pagination.limit,
cursor: pagination.cursor,
beforeCursor: pagination.beforeCursor,
fields: fields as any,
encodeCursor: (values: Array<[string, unknown]>) => {
const cursor = new URLSearchParams();
for (const [key, value] of values) {
if (value === null || value === undefined) {
cursor.set(key, '__null__');
} else if (value instanceof Date) {
cursor.set(key, value.toISOString());
} else {
cursor.set(key, String(value));
}
}
return Buffer.from(cursor.toString(), 'utf8').toString('base64url');
},
decodeCursor: (cursorStr: string, fieldNames: string[]) => {
const parsed = new URLSearchParams(
Buffer.from(cursorStr, 'base64url').toString('utf8'),
);
const result: Record<string, string> = {};
for (const name of fieldNames) {
result[name] = parsed.get(name) ?? '';
}
return result;
},
parseCursor: (decoded: any) => {
const result: Record<string, unknown> = {};
for (const { alias, isNumeric } of sortMeta) {
const val = decoded[alias];
if (val === '__null__') {
result[alias] = null;
} else {
result[alias] = isNumeric ? parseFloat(val) : val;
}
}
result.position = decoded.position;
result.id = decoded.id;
return result;
},
} as any);
}
private applyFilter(
query: SelectQueryBuilder<DB, 'baseRows', any>,
filter: { propertyId: string; operator: string; value?: unknown },
propertyTypeMap: Map<string, string>,
): SelectQueryBuilder<DB, 'baseRows', any> {
const { propertyId, operator, value } = filter;
const propertyType = propertyTypeMap.get(propertyId);
if (!propertyType) return query;
// System property -> use actual column
const systemCol = SYSTEM_COLUMN_MAP[propertyType];
if (systemCol) {
return this.applyColumnFilter(query, systemCol, operator, value, propertyType);
}
const isArray = ARRAY_TYPES.has(propertyType);
// isEmpty / isNotEmpty don't need a value
if (operator === 'isEmpty') {
if (isArray) {
return query.where(({ or, eb }) =>
or([
eb(sql.raw(`cells->'${propertyId}'`), 'is', null),
eb(sql`jsonb_array_length(cells->'${sql.raw(propertyId)}')`, '=', 0),
]),
);
}
return query.where(({ or, eb }) =>
or([
eb(sql.raw(`cells->>'${propertyId}'`), 'is', null),
eb(sql.raw(`cells->>'${propertyId}'`), '=', ''),
]),
);
}
if (operator === 'isNotEmpty') {
if (isArray) {
return query
.where(sql.raw(`cells->'${propertyId}'`), 'is not', null)
.where(sql`jsonb_array_length(cells->'${sql.raw(propertyId)}')`, '>', 0);
}
return query
.where(sql.raw(`cells->>'${propertyId}'`), 'is not', null)
.where(sql.raw(`cells->>'${propertyId}'`), '!=', '');
}
if (value === undefined || value === null) return query;
// contains / notContains - text search
if (operator === 'contains') {
return query.where(
sql.raw(`cells->>'${propertyId}'`),
'ilike',
`%${escapeIlike(String(value))}%`,
);
}
if (operator === 'notContains') {
return query.where(({ or, eb }) =>
or([
eb(sql.raw(`cells->>'${propertyId}'`), 'is', null),
eb(
sql.raw(`cells->>'${propertyId}'`),
'not ilike',
`%${escapeIlike(String(value))}%`,
),
]),
);
}
// equals / notEquals
if (operator === 'equals') {
if (isArray) {
return query.where(
sql<SqlBool>`cells->'${sql.raw(propertyId)}' @> ${JSON.stringify([value])}::jsonb`,
);
}
if (propertyType === 'number') {
return query.where(
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric = ${Number(value)}`,
);
}
if (propertyType === 'checkbox') {
return query.where(
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::boolean = ${Boolean(value)}`,
);
}
return query.where(sql.raw(`cells->>'${propertyId}'`), '=', String(value));
}
if (operator === 'notEquals') {
if (isArray) {
return query.where(({ or, eb }) =>
or([
eb(sql.raw(`cells->'${propertyId}'`), 'is', null),
sql<SqlBool>`NOT (cells->'${sql.raw(propertyId)}' @> ${JSON.stringify([value])}::jsonb)`,
]),
);
}
if (propertyType === 'number') {
return query.where(
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric != ${Number(value)}`,
);
}
if (propertyType === 'checkbox') {
return query.where(
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::boolean != ${Boolean(value)}`,
);
}
return query.where(({ or, eb }) =>
or([
eb(sql.raw(`cells->>'${propertyId}'`), 'is', null),
eb(sql.raw(`cells->>'${propertyId}'`), '!=', String(value)),
]),
);
}
// greaterThan / lessThan - number
if (operator === 'greaterThan') {
return query.where(
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric > ${Number(value)}`,
);
}
if (operator === 'lessThan') {
return query.where(
sql<SqlBool>`(cells->>'${sql.raw(propertyId)}')::numeric < ${Number(value)}`,
);
}
// before / after - date
if (operator === 'before') {
return query.where(sql.raw(`cells->>'${propertyId}'`), '<', String(value));
}
if (operator === 'after') {
return query.where(sql.raw(`cells->>'${propertyId}'`), '>', String(value));
}
return query;
}
private applyColumnFilter(
query: SelectQueryBuilder<DB, 'baseRows', any>,
column: string,
operator: string,
value: unknown,
propertyType: string,
): SelectQueryBuilder<DB, 'baseRows', any> {
if (operator === 'isEmpty') {
return query.where(sql.raw(`"${column}"`), 'is', null);
}
if (operator === 'isNotEmpty') {
return query.where(sql.raw(`"${column}"`), 'is not', null);
}
if (value === undefined || value === null) return query;
if (operator === 'equals') {
return query.where(sql.raw(`"${column}"`), '=', value);
}
if (operator === 'notEquals') {
return query.where(({ or, eb }) =>
or([
eb(sql.raw(`"${column}"`), 'is', null),
eb(sql.raw(`"${column}"`), '!=', value),
]),
);
}
if (operator === 'before') {
return query.where(sql.raw(`"${column}"`), '<', value);
}
if (operator === 'after') {
return query.where(sql.raw(`"${column}"`), '>', value);
}
return query;
}
private applySort(
query: SelectQueryBuilder<DB, 'baseRows', any>,
sort: { propertyId: string; direction: string },
propertyTypeMap: Map<string, string>,
): SelectQueryBuilder<DB, 'baseRows', any> {
const { propertyId, direction } = sort;
const propertyType = propertyTypeMap.get(propertyId);
if (!propertyType) return query;
const dir = direction === 'desc' ? 'desc' : 'asc';
// System property -> use actual column
const systemCol = SYSTEM_COLUMN_MAP[propertyType];
if (systemCol) {
return query.orderBy(sql.raw(`"${systemCol}"`), sql`${sql.raw(dir)} NULLS LAST`);
}
// Number properties: cast to numeric for proper numeric ordering
if (propertyType === 'number') {
return query.orderBy(
sql`(cells->>'${sql.raw(propertyId)}')::numeric`,
sql`${sql.raw(dir)} NULLS LAST`,
);
}
// All other properties: use text extraction
return query.orderBy(
sql.raw(`cells->>'${propertyId}'`),
sql`${sql.raw(dir)} NULLS LAST`,
);
} }
} }
@@ -9,57 +9,64 @@ import {
} from '@docmost/db/types/entity.types'; } from '@docmost/db/types/entity.types';
import { sql } from 'kysely'; import { sql } from 'kysely';
type RepoOpts = { trx?: KyselyTransaction };
type WorkspaceOpts = { workspaceId: string } & RepoOpts;
@Injectable() @Injectable()
export class BaseViewRepo { export class BaseViewRepo {
constructor(@InjectKysely() private readonly db: KyselyDB) {} constructor(@InjectKysely() private readonly db: KyselyDB) {}
async findById( async findById(
viewId: string, viewId: string,
opts?: { trx?: KyselyTransaction }, opts: WorkspaceOpts,
): Promise<BaseView | undefined> { ): Promise<BaseView | undefined> {
const db = dbOrTx(this.db, opts?.trx); const db = dbOrTx(this.db, opts.trx);
return db return db
.selectFrom('baseViews') .selectFrom('baseViews')
.selectAll() .selectAll()
.where('id', '=', viewId) .where('id', '=', viewId)
.where('workspaceId', '=', opts.workspaceId)
.executeTakeFirst() as Promise<BaseView | undefined>; .executeTakeFirst() as Promise<BaseView | undefined>;
} }
async findByBaseId( async findByBaseId(
baseId: string, baseId: string,
opts?: { trx?: KyselyTransaction }, opts: WorkspaceOpts,
): Promise<BaseView[]> { ): Promise<BaseView[]> {
const db = dbOrTx(this.db, opts?.trx); const db = dbOrTx(this.db, opts.trx);
return db return db
.selectFrom('baseViews') .selectFrom('baseViews')
.selectAll() .selectAll()
.where('baseId', '=', baseId) .where('baseId', '=', baseId)
.where('workspaceId', '=', opts.workspaceId)
.orderBy('position', 'asc') .orderBy('position', 'asc')
.execute() as Promise<BaseView[]>; .execute() as Promise<BaseView[]>;
} }
async countByBaseId( async countByBaseId(
baseId: string, baseId: string,
trx?: KyselyTransaction, opts: WorkspaceOpts,
): Promise<number> { ): Promise<number> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
const result = await db const result = await db
.selectFrom('baseViews') .selectFrom('baseViews')
.select((eb) => eb.fn.countAll<number>().as('count')) .select((eb) => eb.fn.countAll<number>().as('count'))
.where('baseId', '=', baseId) .where('baseId', '=', baseId)
.where('workspaceId', '=', opts.workspaceId)
.executeTakeFirstOrThrow(); .executeTakeFirstOrThrow();
return Number(result.count); return Number(result.count);
} }
async getLastPosition( async getLastPosition(
baseId: string, baseId: string,
trx?: KyselyTransaction, opts: WorkspaceOpts,
): Promise<string | null> { ): Promise<string | null> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
const result = await db const result = await db
.selectFrom('baseViews') .selectFrom('baseViews')
.select('position') .select('position')
.where('baseId', '=', baseId) .where('baseId', '=', baseId)
.where('workspaceId', '=', opts.workspaceId)
.orderBy(sql`position COLLATE "C"`, sql`DESC`) .orderBy(sql`position COLLATE "C"`, sql`DESC`)
.limit(1) .limit(1)
.executeTakeFirst(); .executeTakeFirst();
@@ -68,9 +75,9 @@ export class BaseViewRepo {
async insertView( async insertView(
view: InsertableBaseView, view: InsertableBaseView,
trx?: KyselyTransaction, opts?: RepoOpts,
): Promise<BaseView> { ): Promise<BaseView> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts?.trx);
return db return db
.insertInto('baseViews') .insertInto('baseViews')
.values(view) .values(view)
@@ -81,24 +88,26 @@ export class BaseViewRepo {
async updateView( async updateView(
viewId: string, viewId: string,
data: UpdatableBaseView, data: UpdatableBaseView,
trx?: KyselyTransaction, opts: WorkspaceOpts,
): Promise<void> { ): Promise<void> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
await db await db
.updateTable('baseViews') .updateTable('baseViews')
.set({ ...data, updatedAt: new Date() }) .set({ ...data, updatedAt: new Date() })
.where('id', '=', viewId) .where('id', '=', viewId)
.where('workspaceId', '=', opts.workspaceId)
.execute(); .execute();
} }
async deleteView( async deleteView(
viewId: string, viewId: string,
trx?: KyselyTransaction, opts: WorkspaceOpts,
): Promise<void> { ): Promise<void> {
const db = dbOrTx(this.db, trx); const db = dbOrTx(this.db, opts.trx);
await db await db
.deleteFrom('baseViews') .deleteFrom('baseViews')
.where('id', '=', viewId) .where('id', '=', viewId)
.where('workspaceId', '=', opts.workspaceId)
.execute(); .execute();
} }
} }
@@ -9,7 +9,7 @@ import {
} from '@docmost/db/types/entity.types'; } from '@docmost/db/types/entity.types';
import { PaginationOptions } from '@docmost/db/pagination/pagination-options'; import { PaginationOptions } from '@docmost/db/pagination/pagination-options';
import { executeWithCursorPagination } from '@docmost/db/pagination/cursor-pagination'; import { executeWithCursorPagination } from '@docmost/db/pagination/cursor-pagination';
import { ExpressionBuilder } from 'kysely'; import { ExpressionBuilder, sql } from 'kysely';
import { DB } from '@docmost/db/types/db'; import { DB } from '@docmost/db/types/db';
import { jsonArrayFrom } from 'kysely/helpers/postgres'; import { jsonArrayFrom } from 'kysely/helpers/postgres';
@@ -120,6 +120,23 @@ export class BaseRepo {
.execute(); .execute();
} }
async bumpSchemaVersion(
baseId: string,
trx?: KyselyTransaction,
): Promise<number> {
const db = dbOrTx(this.db, trx);
const result = await db
.updateTable('bases')
.set({
schemaVersion: sql`schema_version + 1`,
updatedAt: new Date(),
})
.where('id', '=', baseId)
.returning('schemaVersion')
.executeTakeFirst();
return result?.schemaVersion ?? 0;
}
private withProperties(eb: ExpressionBuilder<DB, 'bases'>) { private withProperties(eb: ExpressionBuilder<DB, 'bases'>) {
return jsonArrayFrom( return jsonArrayFrom(
eb eb
+7
View File
@@ -434,6 +434,7 @@ export interface Bases {
createdAt: Generated<Timestamp>; createdAt: Generated<Timestamp>;
updatedAt: Generated<Timestamp>; updatedAt: Generated<Timestamp>;
deletedAt: Timestamp | null; deletedAt: Timestamp | null;
schemaVersion: Generated<number>;
} }
export interface BaseProperties { export interface BaseProperties {
@@ -443,10 +444,14 @@ export interface BaseProperties {
type: string; type: string;
position: string; position: string;
typeOptions: Json | null; typeOptions: Json | null;
pendingType: string | null;
pendingTypeOptions: Json | null;
isPrimary: Generated<boolean>; isPrimary: Generated<boolean>;
workspaceId: string; workspaceId: string;
createdAt: Generated<Timestamp>; createdAt: Generated<Timestamp>;
updatedAt: Generated<Timestamp>; updatedAt: Generated<Timestamp>;
schemaVersion: Generated<number>;
deletedAt: Timestamp | null;
} }
export interface BaseRows { export interface BaseRows {
@@ -460,6 +465,8 @@ export interface BaseRows {
createdAt: Generated<Timestamp>; createdAt: Generated<Timestamp>;
updatedAt: Generated<Timestamp>; updatedAt: Generated<Timestamp>;
deletedAt: Timestamp | null; deletedAt: Timestamp | null;
searchText: string | null;
searchTsv: string | null;
} }
export interface BaseViews { export interface BaseViews {
+12 -3
View File
@@ -223,9 +223,18 @@ export type InsertableBaseProperty = Insertable<BaseProperties>;
export type UpdatableBaseProperty = Updateable<Omit<BaseProperties, 'id'>>; export type UpdatableBaseProperty = Updateable<Omit<BaseProperties, 'id'>>;
// Base Row // Base Row
export type BaseRow = Selectable<BaseRows>; // `searchText` and `searchTsv` are internal fulltext-index columns maintained
export type InsertableBaseRow = Insertable<BaseRows>; // by a trigger. They are omitted from the public types so they never leak into
export type UpdatableBaseRow = Updateable<Omit<BaseRows, 'id'>>; // HTTP responses or write payloads.
export type BaseRow = Omit<Selectable<BaseRows>, 'searchText' | 'searchTsv'>;
export type InsertableBaseRow = Omit<
Insertable<BaseRows>,
'searchText' | 'searchTsv'
>;
export type UpdatableBaseRow = Omit<
Updateable<Omit<BaseRows, 'id'>>,
'searchText' | 'searchTsv'
>;
// Base View // Base View
export type BaseView = Selectable<BaseViews>; export type BaseView = Selectable<BaseViews>;
@@ -9,6 +9,7 @@ export enum QueueName {
HISTORY_QUEUE = '{history-queue}', HISTORY_QUEUE = '{history-queue}',
NOTIFICATION_QUEUE = '{notification-queue}', NOTIFICATION_QUEUE = '{notification-queue}',
AUDIT_QUEUE = '{audit-queue}', AUDIT_QUEUE = '{audit-queue}',
BASE_QUEUE = '{base-queue}',
} }
export enum QueueJob { export enum QueueJob {
@@ -83,4 +84,7 @@ export enum QueueJob {
PDF_EXPORT_TASK = 'pdf-export-task', PDF_EXPORT_TASK = 'pdf-export-task',
PDF_EXPORT_CLEANUP = 'pdf-export-cleanup', PDF_EXPORT_CLEANUP = 'pdf-export-cleanup',
BASE_TYPE_CONVERSION = 'base-type-conversion',
BASE_CELL_GC = 'base-cell-gc',
} }
@@ -113,3 +113,27 @@ export interface IApprovalRejectedNotificationJob {
requestedById: string; requestedById: string;
comment?: string; comment?: string;
} }
export interface IBaseTypeConversionJob {
baseId: string;
propertyId: string;
workspaceId: string;
fromType: string;
toType: string;
// Snapshots taken at enqueue time so the job stays correct even if the
// property's current typeOptions drift while the job waits in the queue.
fromTypeOptions: unknown;
toTypeOptions: unknown;
// When true, the job nulls the cell values for that property instead of
// attempting a value conversion. Used for any conversion where the new
// type has no meaningful representation of the old value (e.g. involving
// a system type).
clearMode: boolean;
actorId?: string;
}
export interface IBaseCellGcJob {
baseId: string;
propertyId: string;
workspaceId: string;
}
@@ -92,6 +92,14 @@ import { GeneralQueueProcessor } from './processors/general-queue.processor';
attempts: 3, attempts: 3,
}, },
}), }),
BullModule.registerQueue({
name: QueueName.BASE_QUEUE,
defaultJobOptions: {
attempts: 2,
removeOnComplete: { count: 200 },
removeOnFail: { count: 100 },
},
}),
], ],
exports: [BullModule], exports: [BullModule],
providers: [GeneralQueueProcessor], providers: [GeneralQueueProcessor],
+19 -1
View File
@@ -1,6 +1,7 @@
import { import {
MessageBody, MessageBody,
OnGatewayConnection, OnGatewayConnection,
OnGatewayDisconnect,
OnGatewayInit, OnGatewayInit,
SubscribeMessage, SubscribeMessage,
WebSocketGateway, WebSocketGateway,
@@ -13,6 +14,7 @@ import { OnModuleDestroy } from '@nestjs/common';
import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo'; import { SpaceMemberRepo } from '@docmost/db/repos/space/space-member.repo';
import { WsService } from './ws.service'; import { WsService } from './ws.service';
import { getSpaceRoomName, getUserRoomName } from './ws.utils'; import { getSpaceRoomName, getUserRoomName } from './ws.utils';
import { BaseWsService } from '../core/base/realtime/base-ws.service';
import * as cookie from 'cookie'; import * as cookie from 'cookie';
@WebSocketGateway({ @WebSocketGateway({
@@ -20,7 +22,11 @@ import * as cookie from 'cookie';
transports: ['websocket'], transports: ['websocket'],
}) })
export class WsGateway export class WsGateway
implements OnGatewayConnection, OnGatewayInit, OnModuleDestroy implements
OnGatewayConnection,
OnGatewayDisconnect,
OnGatewayInit,
OnModuleDestroy
{ {
@WebSocketServer() @WebSocketServer()
server: Server; server: Server;
@@ -29,10 +35,12 @@ export class WsGateway
private tokenService: TokenService, private tokenService: TokenService,
private spaceMemberRepo: SpaceMemberRepo, private spaceMemberRepo: SpaceMemberRepo,
private wsService: WsService, private wsService: WsService,
private baseWsService: BaseWsService,
) {} ) {}
afterInit(server: Server): void { afterInit(server: Server): void {
this.wsService.setServer(server); this.wsService.setServer(server);
this.baseWsService.setServer(server);
} }
async handleConnection(client: Socket, ...args: any[]): Promise<void> { async handleConnection(client: Socket, ...args: any[]): Promise<void> {
@@ -47,6 +55,7 @@ export class WsGateway
const workspaceId = token.workspaceId; const workspaceId = token.workspaceId;
client.data.userId = userId; client.data.userId = userId;
client.data.workspaceId = workspaceId;
const userSpaceIds = await this.spaceMemberRepo.getUserSpaceIds(userId); const userSpaceIds = await this.spaceMemberRepo.getUserSpaceIds(userId);
@@ -61,10 +70,19 @@ export class WsGateway
} }
} }
async handleDisconnect(client: Socket): Promise<void> {
await this.baseWsService.handleDisconnect(client);
}
@SubscribeMessage('message') @SubscribeMessage('message')
async handleMessage(client: Socket, data: any): Promise<void> { async handleMessage(client: Socket, data: any): Promise<void> {
if (this.wsService.isTreeEvent(data)) { if (this.wsService.isTreeEvent(data)) {
await this.wsService.handleTreeEvent(client, data); await this.wsService.handleTreeEvent(client, data);
return;
}
if (this.baseWsService.isBaseEvent(data)) {
await this.baseWsService.handleInbound(client, data);
return;
} }
} }
+2 -1
View File
@@ -3,10 +3,11 @@ import { WsGateway } from './ws.gateway';
import { WsService } from './ws.service'; import { WsService } from './ws.service';
import { WsTreeService } from './ws-tree.service'; import { WsTreeService } from './ws-tree.service';
import { TokenModule } from '../core/auth/token.module'; import { TokenModule } from '../core/auth/token.module';
import { BaseModule } from '../core/base/base.module';
@Global() @Global()
@Module({ @Module({
imports: [TokenModule], imports: [TokenModule, BaseModule],
providers: [WsGateway, WsService, WsTreeService], providers: [WsGateway, WsService, WsTreeService],
exports: [WsGateway, WsService, WsTreeService], exports: [WsGateway, WsService, WsTreeService],
}) })
+11
View File
@@ -16,3 +16,14 @@ export const TREE_EVENTS = new Set([
'deleteTreeNode', 'deleteTreeNode',
'refetchRootTreeNodeEvent', 'refetchRootTreeNodeEvent',
]); ]);
export function getBaseRoomName(baseId: string): string {
return `base-${baseId}`;
}
export const BASE_INBOUND_EVENTS = new Set([
'base:subscribe',
'base:unsubscribe',
'base:presence',
'base:presence:leave',
]);