diff --git a/packages/runner/src/builder/types.ts b/packages/runner/src/builder/types.ts index ee3f124c5..9b884968b 100644 --- a/packages/runner/src/builder/types.ts +++ b/packages/runner/src/builder/types.ts @@ -84,7 +84,7 @@ declare module "@commontools/api" { connect(node: NodeRef): void; export(): { cell: OpaqueRef; - path: PropertyKey[]; + path: readonly PropertyKey[]; value?: Opaque; defaultValue?: Opaque; nodes: Set; @@ -96,7 +96,7 @@ declare module "@commontools/api" { }; unsafe_bindToRecipeAndPath( recipe: Recipe, - path: PropertyKey[], + path: readonly PropertyKey[], ): void; unsafe_getExternal(): OpaqueRef; map( @@ -181,7 +181,9 @@ declare module "@commontools/api" { nodes: Node[]; [unsafe_originalRecipe]?: Recipe; [unsafe_parentRecipe]?: Recipe; - [unsafe_materializeFactory]?: (log: any) => (path: PropertyKey[]) => any; + [unsafe_materializeFactory]?: ( + log: any, + ) => (path: readonly PropertyKey[]) => any; } } @@ -228,7 +230,7 @@ export function isShadowRef(value: unknown): value is ShadowRef { export type UnsafeBinding = { recipe: Recipe; - materialize: (path: PropertyKey[]) => any; + materialize: (path: readonly PropertyKey[]) => any; parent?: UnsafeBinding; }; diff --git a/packages/runner/src/cell.ts b/packages/runner/src/cell.ts index 20618c5be..b60e39f36 100644 --- a/packages/runner/src/cell.ts +++ b/packages/runner/src/cell.ts @@ -39,7 +39,7 @@ import { type NormalizedFullLink, parseNormalizedFullLinktoLegacyDocCellLink, } from "./link-utils.ts"; -import { type IStorageTransaction } from "./storage/interface.ts"; +import type { IExtendedStorageTransaction as IStorageTransaction } from "./storage/interface.ts"; /** * This is the regular Cell interface, generated by DocImpl.asCell(). 
@@ -174,7 +174,7 @@ declare module "@commontools/api" { ): Cell; withLog(log: ReactivityLog): Cell; sink(callback: (value: T) => Cancel | undefined | void): Cancel; - getAsQueryResult( + getAsQueryResult( path?: Path, log?: ReactivityLog, ): QueryResult>; @@ -231,7 +231,7 @@ declare module "@commontools/api" { space: MemorySpace; entityId: EntityId; sourceURI: URI; - path: PropertyKey[]; + path: readonly PropertyKey[]; [isCellMarker]: true; copyTrap: boolean; } @@ -520,7 +520,13 @@ function createRegularCell( getAsQueryResult: (subPath: PropertyKey[] = [], newLog?: ReactivityLog) => createQueryResultProxy(doc, [...path, ...subPath], newLog ?? log), getAsLegacyCellLink: (): LegacyDocCellLink => { - return { space: doc.space, cell: doc, path, schema, rootSchema }; + return { + space: doc.space, + cell: doc, + path: path as string[], + schema, + rootSchema, + }; }, getAsNormalizedFullLink: () => link, getAsLink: ( @@ -572,7 +578,13 @@ function createRegularCell( return self.get(); }, get cellLink(): LegacyDocCellLink { - return { space: doc.space, cell: doc, path, schema, rootSchema }; + return { + space: doc.space, + cell: doc, + path: path as string[], + schema, + rootSchema, + }; }, get space(): MemorySpace { return doc.space; @@ -583,7 +595,7 @@ function createRegularCell( get sourceURI(): URI { return toURI(doc.entityId); }, - get path(): PropertyKey[] { + get path(): readonly PropertyKey[] { return path; }, [isCellMarker]: true, @@ -664,7 +676,7 @@ function subscribeToReferencedDocs( */ function createSigilLink( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], schema?: JSONSchema, rootSchema?: JSONSchema, options: { diff --git a/packages/runner/src/data-updating.ts b/packages/runner/src/data-updating.ts index b5dfe4f0c..b35710876 100644 --- a/packages/runner/src/data-updating.ts +++ b/packages/runner/src/data-updating.ts @@ -24,7 +24,7 @@ import { // if there was no change. 
export function setNestedValue( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], value: unknown, log?: ReactivityLog, ): boolean { diff --git a/packages/runner/src/doc.ts b/packages/runner/src/doc.ts index 184181a4c..54fb7744f 100644 --- a/packages/runner/src/doc.ts +++ b/packages/runner/src/doc.ts @@ -45,7 +45,9 @@ export type DocImpl = { * @param path - Path to follow. * @returns Value. */ - getAtPath(path: Path): DeepKeyLookup; + getAtPath( + path: Path, + ): DeepKeyLookup; /** * Get as value proxy, following query (i.e. aliases) and cell references. @@ -75,7 +77,7 @@ export type DocImpl = { * @param log - Reactivity log. * @returns Simple cell. */ - asCell( + asCell( path?: Path, log?: ReactivityLog, schema?: JSONSchema, @@ -117,7 +119,7 @@ export type DocImpl = { * @returns Whether the value changed. */ setAtPath( - path: PropertyKey[], + path: readonly PropertyKey[], newValue: any, log?: ReactivityLog, schema?: JSONSchema, @@ -130,7 +132,7 @@ export type DocImpl = { * @returns Cancel function. */ updates( - callback: (value: T, path: PropertyKey[], labels?: Labels) => void, + callback: (value: T, path: readonly PropertyKey[], labels?: Labels) => void, ): Cancel; /** @@ -233,7 +235,8 @@ export type DocImpl = { copyTrap: boolean; }; -export type DeepKeyLookup = Path extends [] ? T +export type DeepKeyLookup = Path extends + [] ? T : Path extends [infer First, ...infer Rest] ? First extends keyof T ? Rest extends PropertyKey[] ? DeepKeyLookup @@ -293,7 +296,7 @@ export function createDoc( callbacks.add(callback); return () => callbacks.delete(callback); }, - getAtPath: (path: PropertyKey[]) => getValueAtPath(value, path), + getAtPath: (path: readonly PropertyKey[]) => getValueAtPath(value, path), setAtPath: ( path: PropertyKey[], newValue: any, @@ -346,7 +349,9 @@ export function createDoc( return value as T; }, get "/"(): string { - return typeof entityId.toJSON === "function" ? 
entityId.toJSON()["/"] : (entityId["/"] as string); + return typeof entityId.toJSON === "function" + ? entityId.toJSON()["/"] + : (entityId["/"] as string); }, get entityId(): EntityId { return entityId; @@ -399,7 +404,10 @@ export function createDoc( const docLinkToOpaqueRef = new WeakMap< Frame, - WeakMap, { path: PropertyKey[]; opaqueRef: OpaqueRef }[]> + WeakMap< + DocImpl, + { path: readonly PropertyKey[]; opaqueRef: OpaqueRef }[] + > >(); // Creates aliases to value, used in recipes to refer to this specific cell. We @@ -407,7 +415,7 @@ const docLinkToOpaqueRef = new WeakMap< // creaeting the recipe. export function makeOpaqueRef( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], ): OpaqueRef { const frame = getTopFrame(); if (!frame) throw new Error("No frame"); diff --git a/packages/runner/src/link-resolution.ts b/packages/runner/src/link-resolution.ts index 28b98af80..e7c88007b 100644 --- a/packages/runner/src/link-resolution.ts +++ b/packages/runner/src/link-resolution.ts @@ -45,7 +45,7 @@ export function createVisits(): Visits { */ function createPathCacheKey( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], aliases: boolean = false, ): string { return JSON.stringify([doc.space, doc.toJSON(), path, aliases]); @@ -53,7 +53,7 @@ function createPathCacheKey( export function resolveLinkToValue( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], log?: ReactivityLog, schema?: JSONSchema, rootSchema?: JSONSchema, @@ -65,7 +65,7 @@ export function resolveLinkToValue( export function resolveLinkToWriteRedirect( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], log?: ReactivityLog, schema?: JSONSchema, rootSchema?: JSONSchema, @@ -85,7 +85,7 @@ export function resolveLinks( function resolvePath( doc: DocImpl, - path: PropertyKey[], + path: readonly PropertyKey[], log?: ReactivityLog, schema?: JSONSchema, rootSchema?: JSONSchema, diff --git a/packages/runner/src/link-utils.ts 
b/packages/runner/src/link-utils.ts index 5ccc06df2..d1b349838 100644 --- a/packages/runner/src/link-utils.ts +++ b/packages/runner/src/link-utils.ts @@ -20,14 +20,17 @@ import { isQueryResultForDereferencing, QueryResultInternals, } from "./query-result-proxy.ts"; -import { type IMemoryAddress } from "./storage/interface.ts"; +import type { + IMemorySpaceAddress, + MemoryAddressPathComponent, +} from "./storage/interface.ts"; /** * Normalized link structure returned by parsers */ export type NormalizedLink = { id?: URI; // URI format with "of:" prefix - path: string[]; + path: readonly MemoryAddressPathComponent[]; space?: MemorySpace; type?: string; // Default is "application/json" schema?: JSONSchema; @@ -41,7 +44,7 @@ export type NormalizedLink = { * * Any such link can be used as a memory address. */ -export type NormalizedFullLink = NormalizedLink & IMemoryAddress; +export type NormalizedFullLink = NormalizedLink & IMemorySpaceAddress; /** * A type reflecting all possible link formats, including cells themselves. @@ -449,7 +452,7 @@ function parseToLegacyCellLinkWithMaybeACell( return { cell: cellValue, - path: link.path ?? [], + path: link.path as string[] ?? 
[], space: link.space, schema: link.schema, rootSchema: link.rootSchema, @@ -471,7 +474,7 @@ export function parseNormalizedFullLinktoLegacyDocCellLink( ): LegacyDocCellLink { return { cell: runtime.getCellFromLink(link).getDoc(), - path: link.path, + path: link.path as string[], } satisfies LegacyDocCellLink; } @@ -519,7 +522,7 @@ export function createSigilLinkFromParsedLink( const sigilLink: SigilLink = { "/": { [LINK_V1_TAG]: { - path: link.path, + path: link.path as string[], schema: link.schema, rootSchema: link.rootSchema, }, diff --git a/packages/runner/src/path-utils.ts b/packages/runner/src/path-utils.ts index 9662547a3..2db4bb816 100644 --- a/packages/runner/src/path-utils.ts +++ b/packages/runner/src/path-utils.ts @@ -29,7 +29,7 @@ export function setValueAtPath( return true; } -export function getValueAtPath(obj: any, path: PropertyKey[]): any { +export function getValueAtPath(obj: any, path: readonly PropertyKey[]): any { let current = obj; for (const key of path) { if (current === undefined || current === null) return undefined; @@ -65,7 +65,10 @@ export const deepEqual = (a: any, b: any): boolean => { return a !== a && b !== b; // NaN check }; -export function arrayEqual(a?: PropertyKey[], b?: PropertyKey[]): boolean { +export function arrayEqual( + a?: readonly PropertyKey[], + b?: readonly PropertyKey[], +): boolean { if (!a || !b) return a === b; if (a.length !== b.length) return false; for (let i = 0; i < a.length; i++) if (a[i] !== b[i]) return false; diff --git a/packages/runner/src/runner.ts b/packages/runner/src/runner.ts index 018c9e2e4..bf9916fe2 100644 --- a/packages/runner/src/runner.ts +++ b/packages/runner/src/runner.ts @@ -249,8 +249,9 @@ export class Runner implements IRunner { // [unsafe closures:] For recipes from closures, add a materialize factory if (recipe[unsafe_originalRecipe]) { - recipe[unsafe_materializeFactory] = (log: any) => (path: PropertyKey[]) => - processCell.getAsQueryResult(path, log); + 
recipe[unsafe_materializeFactory] = + (log: any) => (path: readonly PropertyKey[]) => + processCell.getAsQueryResult(path, log); } for (const node of recipe.nodes) { @@ -551,7 +552,7 @@ export class Runner implements IRunner { const frame = pushFrameFromCause(cause, { recipe, - materialize: (path: PropertyKey[]) => + materialize: (path: readonly PropertyKey[]) => processCell.getAsQueryResult(path), }); @@ -618,7 +619,7 @@ export class Runner implements IRunner { { inputs, outputs, fn: fn.toString() }, { recipe, - materialize: (path: PropertyKey[]) => + materialize: (path: readonly PropertyKey[]) => processCell.getAsQueryResult(path, log), } satisfies UnsafeBinding, ); diff --git a/packages/runner/src/runtime.ts b/packages/runner/src/runtime.ts index e4929708c..0e0e8c75c 100644 --- a/packages/runner/src/runtime.ts +++ b/packages/runner/src/runtime.ts @@ -9,9 +9,9 @@ import type { RecipeEnvironment } from "./builder/env.ts"; import { ContextualFlowControl } from "./cfc.ts"; import { setRecipeEnvironment } from "./builder/env.ts"; import type { + IExtendedStorageTransaction, IStorageManager, IStorageProvider, - IStorageTransaction, MemorySpace, } from "./storage/interface.ts"; import { type Cell } from "./cell.ts"; @@ -36,9 +36,9 @@ import { } from "./link-utils.ts"; export type { + IExtendedStorageTransaction, IStorageManager, IStorageProvider, - IStorageTransaction, MemorySpace, }; @@ -92,7 +92,7 @@ export interface IRuntime { dispose(): Promise; // Storage transaction method - edit(): IStorageTransaction; + edit(): IExtendedStorageTransaction; // Cell factory methods getCell( @@ -416,7 +416,7 @@ export class Runtime implements IRuntime { * locally replicated memory spaces. Transaction allows reading from many * multiple spaces but writing only to one space. 
*/ - edit(): IStorageTransaction { + edit(): IExtendedStorageTransaction { return new StorageTransaction(this); } diff --git a/packages/runner/src/scheduler.ts b/packages/runner/src/scheduler.ts index 50b8ded63..4486fc697 100644 --- a/packages/runner/src/scheduler.ts +++ b/packages/runner/src/scheduler.ts @@ -120,7 +120,7 @@ export class Scheduler implements IScheduler { this.cancels.set( action, reads.map(({ cell: doc, path }) => - doc.updates((_newValue: any, changedPath: PropertyKey[]) => { + doc.updates((_newValue: any, changedPath: readonly PropertyKey[]) => { if (pathAffected(changedPath, path)) { this.dirty.add(doc); this.queueExecution(); @@ -454,7 +454,10 @@ export function compactifyPaths( return result; } -function pathAffected(changedPath: PropertyKey[], path: PropertyKey[]) { +function pathAffected( + changedPath: readonly PropertyKey[], + path: readonly PropertyKey[], +) { changedPath = changedPath.map((key) => key.toString()); // Normalize to strings as keys return ( (changedPath.length <= path.length && diff --git a/packages/runner/src/schema.ts b/packages/runner/src/schema.ts index 3996ee574..d0c0df2ed 100644 --- a/packages/runner/src/schema.ts +++ b/packages/runner/src/schema.ts @@ -7,7 +7,7 @@ import { type LegacyDocCellLink, LINK_V1_TAG } from "./sigil-types.ts"; import { type ReactivityLog } from "./scheduler.ts"; import { resolveLinks, resolveLinkToWriteRedirect } from "./link-resolution.ts"; import { toURI } from "./uri-utils.ts"; -import { type IStorageTransaction } from "./storage/interface.ts"; +import { type IExtendedStorageTransaction } from "./storage/interface.ts"; import { type IRuntime } from "./runtime.ts"; import { type NormalizedFullLink } from "./link-utils.ts"; @@ -84,7 +84,7 @@ function resolveSchema( */ function processDefaultValue( runtime: IRuntime, - tx: IStorageTransaction, + tx: IExtendedStorageTransaction, link: NormalizedFullLink, defaultValue: any, log?: ReactivityLog, @@ -273,7 +273,7 @@ function mergeDefaults( export 
function validateAndTransform( runtime: IRuntime, - tx: IStorageTransaction, + tx: IExtendedStorageTransaction, link: NormalizedFullLink, log?: ReactivityLog, seen: Array<[string, any]> = [], diff --git a/packages/runner/src/sigil-types.ts b/packages/runner/src/sigil-types.ts index 7b8d188f3..95b4f2f40 100644 --- a/packages/runner/src/sigil-types.ts +++ b/packages/runner/src/sigil-types.ts @@ -20,7 +20,7 @@ export const LINK_V1_TAG = "link@1" as const; export type LinkV1 = { [LINK_V1_TAG]: { id?: URI; - path?: string[]; + path?: readonly string[]; space?: MemorySpace; schema?: JSONSchema; rootSchema?: JSONSchema; @@ -71,7 +71,7 @@ export type LegacyDocCellLink = { export type LegacyAlias = { $alias: { cell?: DocImpl | ShadowRef | number; - path: PropertyKey[]; + path: readonly PropertyKey[]; schema?: JSONSchema; rootSchema?: JSONSchema; }; diff --git a/packages/runner/src/storage/cache.ts b/packages/runner/src/storage/cache.ts index 52d506db5..5163c2e37 100644 --- a/packages/runner/src/storage/cache.ts +++ b/packages/runner/src/storage/cache.ts @@ -1,9 +1,7 @@ -import { fromString, refer } from "merkle-reference"; -import { isBrowser } from "@commontools/utils/env"; -import { isObject } from "@commontools/utils/types"; import type { + Assertion, AuthorizationError, - Changes, + Changes as MemoryChanges, Commit, ConflictError, ConnectionError, @@ -11,12 +9,18 @@ import type { Entity, Fact, FactAddress, + Invariant, + JSONValue, MemorySpace, Protocol, ProviderCommand, + ProviderSession, QueryError, + Reference, Result, Revision, + SchemaContext, + SchemaPathSelector, SchemaQueryArgs, Signer, State, @@ -25,25 +29,44 @@ import type { TransactionError, UCAN, Unit, + Variant, } from "@commontools/memory/interface"; import { set, setSelector } from "@commontools/memory/selection"; import type { MemorySpaceSession } from "@commontools/memory/consumer"; -import { assert, retract, unclaimed } from "@commontools/memory/fact"; +import { assert, claim, retract, unclaimed } from 
"@commontools/memory/fact"; import { the, toChanges, toRevision } from "@commontools/memory/commit"; import * as Consumer from "@commontools/memory/consumer"; import * as Codec from "@commontools/memory/codec"; -import { SchemaNone } from "@commontools/memory/schema"; import { type Cancel, type EntityId } from "@commontools/runner"; -import type { JSONSchema, JSONValue, SchemaContext } from "../builder/types.ts"; +import type { JSONSchema } from "../builder/types.ts"; import { ContextualFlowControl } from "../cfc.ts"; import { deepEqual } from "../path-utils.ts"; -import { MapSet, type SchemaPathSelector } from "../traverse.ts"; -import type { IStorageProvider, StorageValue } from "./interface.ts"; +import { MapSet } from "../traverse.ts"; +import { fromString, refer } from "merkle-reference"; +import { isBrowser } from "@commontools/utils/env"; +import { isObject } from "@commontools/utils/types"; +import type { + Assert, + Claim, + IRemoteStorageProviderSettings, + IStorageManager, + IStorageManagerV2, + IStorageProvider, + IStorageProviderWithReplica, + IStorageTransaction, + IStoreError, + ITransaction, + PushError, + Retract, + StorageValue, +} from "./interface.ts"; import { BaseStorageProvider } from "./base.ts"; import * as IDB from "./idb.ts"; +export * from "@commontools/memory/interface"; import { Channel, RawCommand } from "./inspector.ts"; +import { SchemaNone } from "@commontools/memory/schema"; +import * as Transaction from "./transaction.ts"; -export * from "@commontools/memory/interface"; export type { Result, Unit }; export interface Selector extends Iterable { } @@ -66,7 +89,7 @@ export interface AsyncPull { pull( selector: Selector
, ): Promise< - Result, StoreError> + Result, IStoreError> >; } @@ -74,7 +97,7 @@ export interface AsyncPush { merge( entries: Iterable, merge: Merge, - ): Promise>; + ): Promise>; } export interface AsyncStore @@ -85,7 +108,7 @@ export interface SyncPull { pull( selector: Selector
, ): Promise< - Result, StoreError> + Result, IStoreError> >; } @@ -93,7 +116,7 @@ export interface SyncPush { merge( entries: Iterable, merge: Merge, - ): Result; + ): Result; } export interface SyncStore @@ -105,23 +128,6 @@ interface NotFoundError extends Error { address: FactAddress; } -interface StoreError extends Error { - name: "StoreError"; - cause: Error; -} - -export interface Assert { - the: The; - of: Entity; - is: JSONValue; -} - -export interface Retract { - the: The; - of: Entity; - is?: void; -} - const toKey = ({ the, of }: FactAddress) => `${of}/${the}`; const fromKey = (key: string): FactAddress => { const separatorIndex = key.indexOf("/"); @@ -140,7 +146,7 @@ export class NoCache */ async pull( selector: Selector
, - ): Promise, StoreError>> { + ): Promise, IStoreError>> { return await { ok: new Map() }; } @@ -160,6 +166,24 @@ class Nursery implements SyncPush { return undefined; } + /** + * If state `before` and `after` are the same that implies that remote has + * caught up with `nursery` so we evict record from nursery allowing reads + * to fall through to `heap`. If `after` is `undefined` that is very unusual, + * yet we keep value from `before` as nursery is more likely ahead. If + * `before` is `undefined` keep it as is because reads would fall through + * to `heap` anyway. + */ + static evict(before?: State, after?: State) { + return before == undefined + ? undefined + : after === undefined + ? before + : JSON.stringify(before) === JSON.stringify(after) + ? undefined + : before; + } + constructor(public store: Map = new Map()) { } get(entry: FactAddress) { @@ -442,6 +466,10 @@ export class Replica { this.pull = this.pull.bind(this); } + did(): MemorySpace { + return this.space; + } + async poll() { // Poll re-fetches the commit log, then subscribes to that // We don't use the autosubscribing query, since we want the @@ -470,7 +498,7 @@ export class Replica { if (next.done) { break; } - this.merge(next.value[this.space] as unknown as Commit); + this.integrate(next.value[this.space] as unknown as Commit); } } @@ -485,7 +513,7 @@ export class Replica { ): Promise< Result< Selection>, - StoreError | QueryError | AuthorizationError | ConnectionError + IStoreError | QueryError | AuthorizationError | ConnectionError > > { // If requested entry list is empty there is nothing to fetch so we return @@ -603,7 +631,7 @@ export class Replica { ): Promise< Result< Selection>, - StoreError | QueryError | AuthorizationError | ConnectionError + IStoreError | QueryError | AuthorizationError | ConnectionError > > { // First we identify entries that we need to load from the store. @@ -689,16 +717,11 @@ export class Replica { * would assume state that is rejected. 
*/ async push( - changes: (Assert | Retract)[], + changes: (Assert | Retract | Claim)[], ): Promise< Result< Commit, - | StoreError - | QueryError - | ConnectionError - | ConflictError - | TransactionError - | AuthorizationError + PushError > > { // First we pull all the affected entries into heap so we can build a @@ -712,10 +735,18 @@ export class Replica { // Collect facts so that we can derive desired state and a corresponding // transaction const facts: Fact[] = []; - for (const { the, of, is } of changes) { + const claims: Invariant[] = []; + for (const { the, of, is, claim } of changes) { const fact = this.get({ the, of }); - // If `is` is `undefined` we want to retract the fact. - if (is === undefined) { + + if (claim) { + claims.push({ + the, + of, + fact: refer(fact), + }); + } else if (is === undefined) { + // If `is` is `undefined` we want to retract the fact. // If local `is` in the local state is also `undefined` desired state // matches current state in which case we omit this change from the // transaction, otherwise we retract fact. @@ -733,71 +764,104 @@ export class Replica { } } - // Store facts in a nursery so that subsequent changes will be build - // optimistically assuming that push will succeed. - this.nursery.merge(facts, Nursery.put); - // Track all our pending changes - facts.map((fact) => - this.pendingNurseryChanges.add(toKey(fact), fact.cause.toString()) - ); - // These push transaction that will commit desired state to a remote. - const result = await this.remote.transact({ - changes: getChanges(facts), - }); + return this.commit({ facts, claims }); + } + } - // If transaction fails we delete facts from the nursery so that new - // changes will not build upon rejected state. If there are other inflight - // transactions that already were built upon our facts they will also fail. 
- if (result.error) { - for (const fact of facts) { - this.pendingNurseryChanges.deleteValue( - toKey(fact), - fact.cause.toString(), - ); - this.seenNurseryChanges.delete(toKey(fact)); - } - this.nursery.merge(facts, Nursery.delete); - const fact = result.error.name === "ConflictError" && - result.error.conflict.actual; - // We also update heap so it holds latest record - if (fact) { - this.heap.merge([fact], Replica.update); - } - } // - // If transaction succeeded we promote facts from nursery into a heap. - else { - const commit = toRevision(result.ok); - const { since } = commit.is; - // Turn facts into revisions corresponding with the commit. - const revisions = [ - ...facts.map((fact) => ({ ...fact, since })), - // We strip transaction info so we don't duplicate same data - { ...commit, is: { since: commit.is.since } }, - ]; - // Avoid sending out updates to subscribers if it's a fact we already - // know about in the nursery. - const localFacts = this.getLocalFacts(revisions); - this.heap.merge( - revisions, - Replica.put, - (revision) => !localFacts.has(revision), - ); - // We only delete from the nursery when we've seen all of our pending - // facts (or gotten a conflict). - // Server facts may have newer nursery changes that we want to keep. - const freshFacts = revisions.filter((revision) => - this.pendingNurseryChanges.get(toKey(revision))?.size ?? 0 === 0 + async commit({ facts, claims }: ITransaction) { + // Store facts in a nursery so that subsequent changes will be build + // optimistically assuming that push will succeed. + this.nursery.merge(facts, Nursery.put); + // Track all our pending changes + facts.map((fact) => + this.pendingNurseryChanges.add(toKey(fact), fact.cause.toString()) + ); + + // These push transaction that will commit desired state to a remote. 
+ const result = await this.remote.transact({ + changes: getChanges([...claims, ...facts] as Statement[]), + }); + + // If transaction fails we delete facts from the nursery so that new + // changes will not build upon rejected state. If there are other inflight + // transactions that already were built upon our facts they will also fail. + if (result.error) { + for (const fact of facts) { + this.pendingNurseryChanges.deleteValue( + toKey(fact), + fact.cause.toString(), ); - this.nursery.merge(freshFacts, Nursery.delete); - for (const fact of freshFacts) { - this.pendingNurseryChanges.delete(toKey(fact)); - this.seenNurseryChanges.delete(toKey(fact)); - } + this.seenNurseryChanges.delete(toKey(fact)); + } + this.nursery.merge(facts, Nursery.delete); + const fact = result.error.name === "ConflictError" && + result.error.conflict.actual; + // We also update heap so it holds latest record + if (fact) { + this.heap.merge([fact], Replica.update); + } + } // + // If transaction succeeded we promote facts from nursery into a heap. + else { + const commit = toRevision(result.ok); + const { since } = commit.is; + // Turn facts into revisions corresponding with the commit. + const revisions = [ + ...facts.map((fact) => ({ ...fact, since })), + // We strip transaction info so we don't duplicate same data + { ...commit, is: { since: commit.is.since } }, + ]; + // Avoid sending out updates to subscribers if it's a fact we already + // know about in the nursery. + const localFacts = this.getLocalFacts(revisions); + this.heap.merge( + revisions, + Replica.put, + (revision) => !localFacts.has(revision), + ); + // We only delete from the nursery when we've seen all of our pending + // facts (or gotten a conflict). + // Server facts may have newer nursery changes that we want to keep. + const freshFacts = revisions.filter((revision) => + this.pendingNurseryChanges.get(toKey(revision))?.size ?? 
0 === 0 + ); + this.nursery.merge(freshFacts, Nursery.delete); + for (const fact of freshFacts) { + this.pendingNurseryChanges.delete(toKey(fact)); + this.seenNurseryChanges.delete(toKey(fact)); } - - return result; } + // If transaction fails we delete facts from the nursery so that new + // changes will not build upon rejected state. If there are other inflight + // transactions that already were built upon our facts they will also fail. + if (result.error) { + this.nursery.merge(facts, Nursery.delete); + const fact = result.error.name === "ConflictError" && + result.error.conflict.actual; + // We also update heap so it holds latest record + if (fact) { + this.heap.merge([fact], Replica.update); + } + } // + // If transaction succeeded we promote facts from nursery into a heap. + else { + const commit = toRevision(result.ok); + const { since } = commit.is; + const revisions = [ + ...facts.map((fact) => ({ ...fact, since })), + // We strip transaction info so we don't duplicate same data + { ...commit, is: { since: commit.is.since } }, + ]; + // Turn facts into revisions corresponding with the commit. + this.heap.merge(revisions, Replica.put); + // Evict redundant facts which we just merged into `heap` so that reads + // will occur from `heap`. This way future changes upstream we not get + // shadowed by prior local changes. + this.nursery.merge(facts, Nursery.evict); + } + + return result; } subscribe(entry: FactAddress, subscriber: (value?: Revision) => void) { @@ -810,7 +874,7 @@ export class Replica { this.heap.unsubscribe(entry, subscriber); } - merge(commit: Commit) { + integrate(commit: Commit) { const { the, of, cause, is, since } = toRevision(commit); const revisions = [ { the, of, cause, is: { since: is.since }, since }, @@ -834,7 +898,7 @@ export class Replica { * Returns state corresponding to the requested entry. If there is a pending * state returns it otherwise returns recent state. 
*/ - get(entry: FactAddress) { + get(entry: FactAddress): State | undefined { return this.nursery.get(entry) ?? this.heap.get(entry); } @@ -912,10 +976,10 @@ export interface RemoteStorageProviderOptions { session: Consumer.MemoryConsumer; space: MemorySpace; the?: string; - settings?: RemoteStorageProviderSettings; + settings?: IRemoteStorageProviderSettings; } -export const defaultSettings: RemoteStorageProviderSettings = { +export const defaultSettings: IRemoteStorageProviderSettings = { maxSubscriptionsPerSpace: 50_000, connectionTimeout: 30_000, useSchemaQueries: true, @@ -973,6 +1037,9 @@ class ProviderConnection implements IStorageProvider { get settings() { return this.provider.settings; } + get replica() { + return this.provider.replica; + } connect() { const { connection } = this; // If we already have a connection we remove all the listeners from it. @@ -1111,7 +1178,7 @@ class ProviderConnection implements IStorageProvider { break; } default: - throw new Error(`Unknown event type: ${event.type}`); + throw new RangeError(`Unknown event type: ${event.type}`); } this.connect(); @@ -1223,7 +1290,7 @@ export class Provider implements IStorageProvider { the: string; session: Consumer.MemoryConsumer; spaces: Map; - settings: RemoteStorageProviderSettings; + settings: IRemoteStorageProviderSettings; subscribers: Map) => void>> = new Map(); @@ -1259,6 +1326,9 @@ export class Provider implements IStorageProvider { address: options.address, }); } + get replica() { + return this.workspace; + } mount(space: MemorySpace): Replica { const replica = this.spaces.get(space); @@ -1342,7 +1412,7 @@ export class Provider implements IStorageProvider { | ConnectionError | AuthorizationError | QueryError - | StoreError + | IStoreError > > { const { the, workspace } = this; @@ -1464,26 +1534,15 @@ export interface Options { /** * Various settings to configure storage provider. 
*/ - settings?: RemoteStorageProviderSettings; + settings?: IRemoteStorageProviderSettings; } -export interface IStorageManager { - id: string; - open(space: string): IStorageProvider; -} - -export interface LocalStorageOptions { - as: Signer; - id?: string; - settings?: RemoteStorageProviderSettings; -} - -export class StorageManager implements IStorageManager { +export class StorageManager implements IStorageManager, IStorageManagerV2 { address: URL; as: Signer; id: string; - settings: RemoteStorageProviderSettings; - #providers: Map = new Map(); + settings: IRemoteStorageProviderSettings; + #providers: Map = new Map(); static open(options: Options) { if (options.address.protocol === "memory:") { @@ -1520,7 +1579,7 @@ export class StorageManager implements IStorageManager { return provider; } - protected connect(space: MemorySpace): IStorageProvider { + protected connect(space: MemorySpace): IStorageProviderWithReplica { const { id, address, as, settings } = this; return Provider.connect({ id, @@ -1539,16 +1598,21 @@ export class StorageManager implements IStorageManager { await Promise.all(promises); } + + /** + * Creates a storage transaction that can be used to read / write data into + * locally replicated memory spaces. Transaction allows reading from many + * multiple spaces but writing only to one space. 
+ */ + edit(): IStorageTransaction { + return Transaction.create(this); + } } -export const getChanges = < - T extends The, - Of extends Entity, - Is extends JSONValue, ->( +export const getChanges = ( statements: Iterable, ) => { - const changes = {} as Changes; + const changes = {} as MemoryChanges; for (const statement of statements) { if (statement.cause) { const cause = statement.cause.toString(); @@ -1563,7 +1627,9 @@ export const getChanges = < }; // Given an Assert statement with labels, return a SchemaContext with the ifc tags -const getSchema = (change: Assert | Retract): SchemaContext | undefined => { +const getSchema = ( + change: Assert | Retract | Claim, +): SchemaContext | undefined => { if (isObject(change?.is) && "labels" in change.is) { const schema = { ifc: change.is.labels } as JSONSchema; return { schema: schema, rootSchema: schema }; diff --git a/packages/runner/src/storage/interface.ts b/packages/runner/src/storage/interface.ts index bd09e2926..ae820381e 100644 --- a/packages/runner/src/storage/interface.ts +++ b/packages/runner/src/storage/interface.ts @@ -1,16 +1,21 @@ import type { EntityId } from "../doc-map.ts"; import type { Cancel } from "../cancel.ts"; import type { - AuthorizationError, - Commit, - ConflictError, - ConnectionError, + Assertion, + AuthorizationError as IAuthorizationError, + ConflictError as IConflictError, + ConnectionError as IConnectionError, Entity as URI, + Fact, + FactAddress, + Invariant as IClaim, JSONValue, MemorySpace, - Reference, + QueryError as IQueryError, Result, + Retraction, SchemaContext, + Signer, State, The as MediaType, TransactionError, @@ -18,7 +23,38 @@ import type { Variant, } from "@commontools/memory/interface"; -export type { JSONValue, MemorySpace, Result, SchemaContext, Unit }; +export type { + Assertion, + Fact, + IClaim, + JSONValue, + MemorySpace, + Result, + SchemaContext, + State, + Unit, +}; + +/** + * @deprecated - Use IAttestation instead + */ +export type Read = IAttestation; 
+/** + * @deprecated - Use IAttestation instead + */ +export type Write = IAttestation; + +export interface IStorageTransactionInvariant { + read?: IStorageInvariant; + write?: IStorageInvariant; +} + +export interface IStorageTransactionLog { + get(address: IMemorySpaceAddress): IStorageTransactionInvariant; + addRead(read: IStorageInvariant): void; + addWrite(write: IStorageInvariant): void; + [Symbol.iterator](): Iterator; +} // This type is used to tag a document with any important metadata. // Currently, the only supported type is the classification. @@ -36,7 +72,32 @@ export interface StorageValue { export interface IStorageManager { id: string; - open(space: MemorySpace): IStorageProvider; + open(space: MemorySpace): IStorageProviderWithReplica; +} + +export interface IRemoteStorageProviderSettings { + /** + * Number of subscriptions remote storage provider is allowed to have per + * space. + */ + maxSubscriptionsPerSpace: number; + + /** + * Amount of milliseconds we will spend waiting on WS connection before we + * abort. 
+ */ + connectionTimeout: number; + + /** + * Flag to enable or disable remote schema subscriptions + */ + useSchemaQueries: boolean; +} + +export interface LocalStorageOptions { + as: Signer; + id?: string; + settings?: IRemoteStorageProviderSettings; } export interface IStorageProvider { @@ -100,6 +161,10 @@ export interface IStorageProvider { getReplica(): string | undefined; } +export interface IStorageProviderWithReplica extends IStorageProvider { + replica: ISpaceReplica; +} + export interface IStorageManagerV2 { /** * Creates a storage transaction that can be used to read / write data into @@ -109,6 +174,21 @@ export interface IStorageManagerV2 { edit(): IStorageTransaction; } +export type IStorageTransactionProgress = Variant<{ + open: IStorageTransactionLog; + pending: IStorageTransactionLog; + done: IStorageTransactionLog; +}>; +export type StorageTransactionStatus = Result< + IStorageTransactionState, + StorageTransactionFailed +>; + +export type IStorageTransactionState = + | { status: "ready"; journal: ITransactionJournal } + | { status: "pending"; journal: ITransactionJournal } + | { status: "done"; journal: ITransactionJournal }; + /** * Representation of a storage transaction, which can be used to query facts and * assert / retract while maintaining consistency guarantees. Storage ensures @@ -135,29 +215,7 @@ export interface IStorageTransaction { * This allows transactor to cancel and recreate transaction with a current * state without having to build up a whole transaction and commiting it. */ - status(): Result< - IStorageTransactionProgress, - IStorageTransactionError - >; - - /** - * Returns the log of the transaction. - * - * The log is a list of changes that have been made to the transaction. - * It is used to track the dependencies of the transaction. - * - * If the transaction is aborted, the log reflects the attempted reads and - * writes. If the transaction is committed, the log reflects the actual reads - * and writes. 
- */ - log(): IStorageTransactionLog; - - /** - * Creates a memory space reader for inside this transaction. Fails if - * transaction is no longer in progress. Requesting a reader for the same - * memory space will return same reader instance. - */ - reader(space: MemorySpace): Result; + // status(): StorageTransactionStatus; /** * Helper that is the same as `reader().read()` but more convenient, as it @@ -171,24 +229,7 @@ export interface IStorageTransaction { * @param address - Memory address to read from. * @returns Result containing the read value or an error. */ - read(address: IMemoryAddress): Result; - - /** - * Reads a value from a (local) memory address and throws on error, except for - * `NotFoundError` which is returned as undefined. - * - * @param address - Memory address to read from. - * @returns The read value. - */ - readValueOrThrow(address: IMemoryAddress): JSONValue | undefined; - - /** - * Creates a memory space writer for this transaction. Fails if transaction is - * no longer in progress or if writer for the different space was already open - * on this transaction. Requesting a writer for the same memory space will - * return same writer instance. - */ - writer(space: MemorySpace): Result; + read(adddress: IMemorySpaceAddress): Result; /** * Helper that is the same as `writer().write()` but more convenient, as it @@ -201,7 +242,29 @@ export interface IStorageTransaction { * @param value - Value to write. * @returns Result containing the written value or an error. */ - write(address: IMemoryAddress, value: JSONValue): Result; + write( + address: IMemorySpaceAddress, + value?: JSONValue, + ): Result; + + /** + * Creates a memory space reader for inside this transaction. Fails if + * transaction is no longer in progress. Requesting a reader for the same + * memory space will return same reader instance. + */ + reader( + space: MemorySpace, + ): Result; + + /** + * Creates a memory space writer for this transaction. 
Fails if transaction is + * no longer in progress or if writer for the different space was already open + * on this transaction. Requesting a writer for the same memory space will + * return same writer instance. + */ + writer( + space: MemorySpace, + ): Result; /** * Transaction can be cancelled which causes storage provider to stop keeping @@ -209,7 +272,7 @@ export interface IStorageTransaction { * produce {@link InactiveTransactionError}. Aborted transactions will produce * {@link IStorageTransactionAborted} error on attempt to commit. */ - abort(reason?: Unit): Result; + abort(reason?: unknown): Result; /** * Commits transaction. If transaction is no longer active, this will @@ -219,7 +282,7 @@ export interface IStorageTransaction { * * If transaction is still active and no consistency guarantees have being * invalidated it will be send upstream and status will be updated to - * `pending`. Transaction may still fail with {@link IStorageTransactionFailed} + * `pending`. Transaction may still fail with {@link IStorageTransactionRejected} * if state upstream affects values read from updated space have changed, * which can happen if another client concurrently updates them. Transaction * MAY also fail due to insufficient authorization level or due to various IO @@ -229,15 +292,56 @@ export interface IStorageTransaction { * exact value as on first call and no execution will take place on subsequent * calls. */ - commit(): Promise>; + commit(): Promise>; +} + +export interface IExtendedStorageTransaction extends IStorageTransaction { + /** + * Describes current status of the transaction. If transaction has failed + * or was cancelled result will be an error with a corresponding error variant. + * If transaction is being built it will have `open` status, if commit was + * called but promise has not resolved yet it will be `pending`. If commit + * successfully completed it will be `done`. 
+ * + * Please note that if storage was updated since transaction was created such + * that any of the invariants have changed status will be changed to + * `IStorageConsistencyError` even though transaction has not been committed. + * This allows transactor to cancel and recreate transaction with a current + * state without having to build up a whole transaction and committing it. + */ + status(): Result; + + /** + * Reads a value from a (local) memory address and throws on error, except for + * `NotFoundError` which is returned as undefined. + * + * @param address - Memory address to read from. + * @returns The read value. + */ + readValueOrThrow(address: IMemorySpaceAddress): JSONValue | undefined; + + /** + * Returns the log of the transaction. + * + * The log is a list of changes that have been made to the transaction. + * It is used to track the dependencies of the transaction. + * + * If the transaction is aborted, the log reflects the attempted reads and + * writes. If the transaction is committed, the log reflects the actual reads + * and writes. + * + * @deprecated + */ + log(): IStorageTransactionLog; } export interface ITransactionReader { + did(): MemorySpace; /** * Reads a value from a (local) memory address and captures corresponding - * `Read` in the transaction invariants. If value was written in read memory - * address in this transaction read will return value that was written as - * opposed to value stored. + * `Read` in the transaction invariants. If value was written in read + * memory address in this transaction read will return value that was written + * as opposed to value stored. * * Read will fail with `InactiveTransactionError` if transaction is no longer * active. 
@@ -255,24 +359,16 @@ export interface ITransactionReader { * }) * assert(w.ok) * - * assert(tx.read({ type, id, path: ['author'] }).ok === undefined) - * assert(tx.read({ type, id, path: ['author', 'address'] }).error.name === 'NotFoundError') + * assert(tx.read({ type, id, path: ['author'] }).ok === undefined) + * assert(tx.read({ type, id, path: ['author', 'address'] }).error.name === 'NotFoundError') * // JS specific getters are not supported - * assert(tx.read({ type, id, path: ['content', 'length'] }).ok.is === undefined) - * assert(tx.read({ type, id, path: ['title'] }).ok.is === "Hello world") + * assert(tx.read({ the, of, at: ['content', 'length'] }).ok.is === undefined) + * assert(tx.read({ the, of, at: ['title'] }).ok.is === "Hello world") * // Referencing non-existing facts produces errors - * assert(tx.read({ type: 'bad/mime' , id, path: ['author'] }).error.name === 'NotFoundError') + * assert(tx.read({ the: 'bad/mime' , of, at: ['author'] }).error.name === 'NotFoundError') * ``` */ - read( - address: IMemoryAddress, - ): Result< - Read, - | INotFoundError - | InactiveTransactionError - | IUnsupportedMediaTypeError - | IInvalidDataURIError - >; + read(address: IMemoryAddress): Result; } export interface ITransactionWriter extends ITransactionReader { @@ -285,13 +381,7 @@ export interface ITransactionWriter extends ITransactionReader { write( address: IMemoryAddress, value?: JSONValue, - ): Result< - Write, - | INotFoundError - | InactiveTransactionError - | IUnsupportedMediaTypeError - | IInvalidDataURIError - >; + ): Result; } /** @@ -328,6 +418,10 @@ export interface IStorageTransactionAborted extends Error { */ export interface IStorageTransactionInconsistent extends Error { name: "StorageTransactionInconsistent"; + + address: IMemoryAddress; + + from(space: MemorySpace): IStorageTransactionInconsistent; } /** @@ -335,21 +429,24 @@ export interface IStorageTransactionInconsistent extends Error { * no longer active. 
*/ export type InactiveTransactionError = - | IStorageTransactionInconsistent - | IStorageTransactionAborted - | IStorageTransactionFailed + | StorageTransactionFailed | IStorageTransactionComplete; -export type IStorageTransactionError = - | IStorageTransactionAborted +export type StorageTransactionFailed = | IStorageTransactionInconsistent - | IStorageTransactionFailed; + | IStorageTransactionAborted + | IStorageTransactionRejected; -export type IStorageTransactionFailed = - | ConflictError +export type IStorageTransactionRejected = + | IConflictError + | IStoreError | TransactionError - | ConnectionError - | AuthorizationError; + | IConnectionError + | IAuthorizationError; + +export type CommitError = + | InactiveTransactionError + | IStorageTransactionRejected; export interface INotFoundError extends Error { name: "NotFoundError"; @@ -371,41 +468,45 @@ export interface IInvalidDataURIError extends Error { cause: Error; } -export type IReaderError = - | IStorageTransactionComplete - | IStorageTransactionAborted +export type ReadError = | INotFoundError - | IUnsupportedMediaTypeError - | IInvalidDataURIError; + | InactiveTransactionError + | IInvalidDataURIError + | IUnsupportedMediaTypeError; -export type IWriterError = - | IStorageTransactionComplete - | IStorageTransactionAborted - | IStorageTransactionInconsistent - | IStorageTransactionWriteIsolationError +export type WriteError = | INotFoundError | IUnsupportedMediaTypeError - | IInvalidDataURIError; + | InactiveTransactionError; + +export type ReaderError = InactiveTransactionError; + +export type WriterError = + | InactiveTransactionError + | IStorageTransactionWriteIsolationError; export interface IStorageTransactionComplete extends Error { name: "StorageTransactionCompleteError"; } +export interface INotFoundError extends Error { + name: "NotFoundError"; -export type IStorageTransactionProgress = Variant<{ - open: IStorageTransactionLog; - pending: IStorageTransactionLog; - done: IStorageTransactionLog; 
-}>; + /** + * Source in which address could not be resolved. + */ + source: IAttestation; + + /** + * Address that we could not resolve. + */ + address: IMemoryAddress; +} /** * Represents adddress within the memory space which is like pointer inside the * fact value in the memory. */ export interface IMemoryAddress { - /** - * Memory space to read from. - */ - space: MemorySpace; /** * URI to an entitiy. It corresponds to `of` field in the memory protocol. */ @@ -419,19 +520,120 @@ export interface IMemoryAddress { * Path to the {@link JSONValue} being reference by this address. It is path * within the `is` field of the fact in memory protocol. */ - path: MemoryAddressPathComponent[]; + path: readonly MemoryAddressPathComponent[]; +} + +export interface IMemorySpaceAddress extends IMemoryAddress { + space: MemorySpace; } -export type MemoryAddressPathComponent = string; +export type MemoryAddressPathComponent = string | number; + +export interface Assert { + the: MediaType; + of: URI; + is: JSONValue; + + claim?: void; +} + +export interface Retract { + the: MediaType; + of: URI; + is?: void; + + claim?: void; +} -export interface IStorageTransactionLog - extends Iterable { - get(address: IMemoryAddress): IStorageTransactionInvariant; +export interface Claim { + the: MediaType; + of: URI; + is?: void; + claim: true; +} + +export interface ISpace { + did(): MemorySpace; +} + +export interface ISpaceReplica extends ISpace { + /** + * Return a state for the requested entry or returns `undefined` if replica + * does not have it. 
+ */ + get(entry: FactAddress): State | undefined; + + commit( + transaction: ITransaction, + ): Promise>; +} + +export type PushError = + | IQueryError + | IStoreError + | IConnectionError + | IConflictError + | TransactionError + | IAuthorizationError; + +export interface IStoreError extends Error { + name: "StoreError"; + cause: Error; } -export type IStorageTransactionInvariant = Variant<{ - read: Read; - write: Write; +/** + * Archive of the journal keyed by memory space. Each read attestation + * are represented as `claims` and write attestation are represented as + * `facts`. + */ +export type JournalArchive = Map; + +export interface ITransactionJournal { + activity(): Iterable; + + novelty(space: MemorySpace): Iterable; + history(space: MemorySpace): Iterable; + + reader( + space: MemorySpace, + ): Result; + + writer( + space: MemorySpace, + ): Result; + + /** + * Closes underlying transaction, making it non-editable going forward. Any + * attempts to edit it will fail. + */ + close(): Result, InactiveTransactionError>; + + /** + * Aborts underlying transaction, making it non-editable going forward. Any + * attempts to edit it will fail. + */ + abort(reason?: unknown): Result; +} + +export interface EditableJournal { + activity(): Iterable; + novelty: Iterable; + history(): Iterable; +} + +export interface ITransaction { + claims: IClaim[]; + + facts: Fact[]; +} + +export interface IStorageEdit { + for(space: MemorySpace): ITransaction; +} + +export type Activity = Variant<{ + read: IMemorySpaceAddress; + write: IMemorySpaceAddress; }>; /** @@ -453,19 +655,15 @@ export interface IStorageTransactionWriteIsolationError extends Error { } /** - * Describes read invariant of the underlaying transaction. + * Describes either observed or desired state of the memory at a specific + * address. 
*/ -export interface Read { +export interface IAttestation { readonly address: IMemoryAddress; readonly value?: JSONValue; - readonly cause: Reference; } -/** - * Describes write invariant of the underlaying transaction. - */ -export interface Write { - readonly address: IMemoryAddress; +export interface IStorageInvariant { + readonly address: IMemorySpaceAddress; readonly value?: JSONValue; - readonly cause: Reference; } diff --git a/packages/runner/src/storage/transaction-shim.ts b/packages/runner/src/storage/transaction-shim.ts index 1309c8878..e85553b55 100644 --- a/packages/runner/src/storage/transaction-shim.ts +++ b/packages/runner/src/storage/transaction-shim.ts @@ -2,13 +2,12 @@ import { refer } from "@commontools/memory/reference"; import { isRecord } from "@commontools/utils/types"; import type { IInvalidDataURIError, - IMemoryAddress, + IMemorySpaceAddress, InactiveTransactionError, INotFoundError, - IReaderError, + IStorageInvariant, IStorageTransaction, IStorageTransactionComplete, - IStorageTransactionError, IStorageTransactionInconsistent, IStorageTransactionInvariant, IStorageTransactionLog, @@ -16,12 +15,17 @@ import type { ITransactionReader, ITransactionWriter, IUnsupportedMediaTypeError, - IWriterError, JSONValue, MemoryAddressPathComponent, + MemorySpace, Read, + ReaderError, + ReadError, Result, + StorageTransactionFailed, Write, + WriteError, + WriterError, } from "./interface.ts"; import type { IRuntime } from "../runtime.ts"; import type { DocImpl } from "../doc.ts"; @@ -44,7 +48,7 @@ function uriToEntityId(uri: string): EntityId { */ function validateParentPath( value: any, - path: MemoryAddressPathComponent[], + path: readonly MemoryAddressPathComponent[], ): INotFoundError | null { if (path.length === 0) { return null; // Root write, no validation needed @@ -102,15 +106,15 @@ function validateParentPath( class StorageTransactionLog implements IStorageTransactionLog { private log: IStorageTransactionInvariant[] = []; - get(_address: 
IMemoryAddress): IStorageTransactionInvariant { + get(_address: IMemorySpaceAddress): IStorageTransactionInvariant { throw new Error("Not implemented"); } - addRead(read: Read): void { + addRead(read: IStorageInvariant): void { this.log.push({ read }); } - addWrite(write: Write): void { + addWrite(write: IStorageInvariant): void { this.log.push({ write }); } @@ -125,12 +129,17 @@ class StorageTransactionLog implements IStorageTransactionLog { class TransactionReader implements ITransactionReader { constructor( protected runtime: IRuntime, + protected space: MemorySpace, protected log: StorageTransactionLog, ) {} + did() { + return this.space; + } + read( - address: IMemoryAddress, - ): Result { + address: IMemorySpaceAddress, + ): Result { if (address.type !== "application/json") { const error = new Error( "Unsupported media type", @@ -155,10 +164,9 @@ class TransactionReader implements ITransactionReader { const value = getValueAtPath(json, address.path); - const read: Read = { + const read: IStorageInvariant = { address, value, - cause: refer("shim does not care"), }; this.log.addRead(read); @@ -211,10 +219,9 @@ class TransactionReader implements ITransactionReader { } // Read from doc itself const value = doc.getAtPath(rest); - const read: Read = { + const read: IStorageInvariant = { address, value, - cause: refer("shim does not care"), }; this.log.addRead(read); return { ok: read }; @@ -234,10 +241,9 @@ class TransactionReader implements ITransactionReader { // Convert EntityId to URI string value = `of:${JSON.parse(JSON.stringify(sourceCell.entityId))["/"]}`; } - const read: Read = { + const read: IStorageInvariant = { address, value, - cause: refer("shim does not care"), }; this.log.addRead(read); return { ok: read }; @@ -257,14 +263,13 @@ class TransactionReader implements ITransactionReader { class TransactionWriter extends TransactionReader implements ITransactionWriter { write( - address: IMemoryAddress, + address: IMemorySpaceAddress, value?: any, ): 
Result< Write, | INotFoundError | InactiveTransactionError | IUnsupportedMediaTypeError - | IInvalidDataURIError > { if (address.type !== "application/json") { const error = new Error( @@ -321,10 +326,9 @@ class TransactionWriter extends TransactionReader } // Write to doc itself doc.setAtPath(rest, value); - const write: Write = { + const write: IStorageInvariant = { address, value, - cause: refer(address.id), }; this.log.addWrite(write); return { ok: write }; @@ -360,10 +364,9 @@ class TransactionWriter extends TransactionReader return { ok: undefined, error: notFoundError }; } doc.sourceCell = sourceDoc; - const write: Write = { + const write: IStorageInvariant = { address, value, - cause: refer(address.id), }; this.log.addWrite(write); return { ok: write }; @@ -388,7 +391,7 @@ export class StorageTransaction implements IStorageTransaction { constructor(private runtime: IRuntime) {} - status(): Result { + status(): Result { return { ok: this.currentStatus }; } @@ -396,7 +399,7 @@ export class StorageTransaction implements IStorageTransaction { return this.txLog; } - reader(space: string): Result { + reader(space: MemorySpace): Result { if (this.currentStatus.open === undefined) { const error = new Error( "Storage transaction complete", @@ -410,14 +413,14 @@ export class StorageTransaction implements IStorageTransaction { let reader = this.readers.get(space); if (!reader) { - reader = new TransactionReader(this.runtime, this.txLog); + reader = new TransactionReader(this.runtime, space, this.txLog); this.readers.set(space, reader); } return { ok: reader }; } - read(address: IMemoryAddress): Result { + read(address: IMemorySpaceAddress): Result { const readerResult = this.reader(address.space); if (readerResult.error) { return { ok: undefined, error: readerResult.error }; @@ -425,12 +428,12 @@ export class StorageTransaction implements IStorageTransaction { const readResult = readerResult.ok!.read(address); if (readResult.error) { - return { ok: undefined, error: 
readResult.error as IReaderError }; + return { ok: undefined, error: readResult.error }; } return { ok: readResult.ok }; } - readValueOrThrow(address: IMemoryAddress): JSONValue | undefined { + readValueOrThrow(address: IMemorySpaceAddress): JSONValue | undefined { const readResult = this.read(address); if (readResult.error && readResult.error.name !== "NotFoundError") { throw readResult.error; @@ -438,7 +441,7 @@ export class StorageTransaction implements IStorageTransaction { return readResult.ok?.value; } - writer(space: string): Result { + writer(space: MemorySpace): Result { if (this.currentStatus.open === undefined) { const error = new Error( "Storage transaction complete", @@ -464,14 +467,17 @@ export class StorageTransaction implements IStorageTransaction { let writer = this.writers.get(space); if (!writer) { - writer = new TransactionWriter(this.runtime, this.txLog); + writer = new TransactionWriter(this.runtime, space, this.txLog); this.writers.set(space, writer); } return { ok: writer }; } - write(address: IMemoryAddress, value: any): Result { + write( + address: IMemorySpaceAddress, + value: any, + ): Result { const writerResult = this.writer(address.space); if (writerResult.error) { return { ok: undefined, error: writerResult.error }; @@ -479,7 +485,7 @@ export class StorageTransaction implements IStorageTransaction { const writeResult = writerResult.ok!.write(address, value); if (writeResult.error) { - return { ok: undefined, error: writeResult.error as IWriterError }; + return { ok: undefined, error: writeResult.error as WriteError }; } return { ok: writeResult.ok }; } @@ -501,7 +507,7 @@ export class StorageTransaction implements IStorageTransaction { return { ok: undefined }; } - commit(): Promise> { + commit(): Promise> { if (this.currentStatus.open === undefined) { const error: any = new Error("Transaction already aborted"); error.name = "StorageTransactionAborted"; diff --git a/packages/runner/src/storage/transaction.ts 
b/packages/runner/src/storage/transaction.ts new file mode 100644 index 000000000..3aa702dc4 --- /dev/null +++ b/packages/runner/src/storage/transaction.ts @@ -0,0 +1,321 @@ +import type { + CommitError, + IAttestation, + IMemorySpaceAddress, + InactiveTransactionError, + IStorageManager, + IStorageTransaction, + IStorageTransactionAborted, + IStorageTransactionComplete, + IStorageTransactionWriteIsolationError, + ITransactionReader, + ITransactionWriter, + JSONValue, + MemorySpace, + ReaderError, + Result, + StorageTransactionFailed, + StorageTransactionStatus, + Unit, + WriteError, + WriterError, +} from "./interface.ts"; + +import * as Journal from "./transaction/journal.ts"; + +export const create = (manager: IStorageManager) => + new StorageTransaction({ + status: "ready", + storage: manager, + journal: Journal.open(manager), + writer: null, + }); + +export type EditableState = { + status: "ready"; + storage: IStorageManager; + journal: Journal.Journal; + writer: ITransactionWriter | null; +}; + +export type SumbittedState = { + status: "pending"; + journal: Journal.Journal; + promise: Promise>; +}; + +export type CompleteState = { + status: "done"; + journal: Journal.Journal; + result: Result; +}; + +export type State = + | EditableState + | SumbittedState + | CompleteState; + +/** + * Storage transaction implementation that maintains consistency guarantees + * for reads and writes across memory spaces. 
+ */ +class StorageTransaction implements IStorageTransaction { + static mutate(transaction: StorageTransaction, state: State) { + transaction.#state = state; + } + static use(transaction: StorageTransaction): State { + return transaction.#state; + } + + #state: State; + constructor(state: State) { + this.#state = state; + } + + status(): StorageTransactionStatus { + return status(this); + } + + reader(space: MemorySpace): Result { + return reader(this, space); + } + + writer(space: MemorySpace): Result { + return writer(this, space); + } + + read(address: IMemorySpaceAddress) { + return read(this, address); + } + + write(address: IMemorySpaceAddress, value?: JSONValue) { + return write(this, address, value); + } + + abort(reason?: unknown): Result { + return abort(this, reason); + } + + commit(): Promise> { + return commit(this); + } +} + +const { mutate, use } = StorageTransaction; + +/** + * Returns given transaction status. + */ +export const status = ( + transaction: StorageTransaction, +): StorageTransactionStatus => { + const state = use(transaction); + if (state.status === "done") { + return state.result.error ? state.result : { ok: state }; + } else { + return { ok: state }; + } +}; + +/** + * Returns transaction state if it is editable otherwise fails with error. + */ +const edit = ( + transaction: StorageTransaction, +): Result => { + const state = use(transaction); + if (state.status === "ready") { + return { ok: state }; + } else { + return { error: new TransactionCompleteError() }; + } +}; + +/** + * Opens a transaction reader for the given space or fails if transaction is + * no longer editable. 
+ */ +export const reader = ( + transaction: StorageTransaction, + space: MemorySpace, +): Result => { + const { error, ok: ready } = edit(transaction); + if (error) { + return { error }; + } else { + return ready.journal.reader(space); + } +}; + +/** + * Opens a transaction writer for the given space or fails if transaction is + * no longer editable or if writer for a different space is open. + */ +export const writer = ( + transaction: StorageTransaction, + space: MemorySpace, +): Result => { + const { error, ok: ready } = edit(transaction); + if (error) { + return { error }; + } else { + const writer = ready.writer; + if (writer) { + if (writer.did() === space) { + return { ok: writer }; + } else { + return { + error: new WriteIsolationError({ + open: writer.did(), + requested: space, + }), + }; + } + } else { + const { error, ok: writer } = ready.journal.writer(space); + if (error) { + switch (error.name) { + case "StorageTransactionCompleteError": + case "StorageTransactionAborted": { + return { error }; + } + default: { + mutate(transaction, { + status: "done", + journal: ready.journal, + result: { error }, + }); + return { error }; + } + } + } else { + ready.writer = writer; + return { ok: writer }; + } + } + } +}; + +export const read = ( + transaction: StorageTransaction, + address: IMemorySpaceAddress, +) => { + const { ok: space, error } = reader(transaction, address.space); + if (error) { + return { error }; + } else { + const { space: _, ...memoryAddress } = address; + return space.read(memoryAddress); + } +}; + +export const write = ( + transaction: StorageTransaction, + address: IMemorySpaceAddress, + value?: JSONValue, +): Result => { + const { ok: space, error } = writer(transaction, address.space); + if (error) { + return { error }; + } else { + const { space: _, ...memoryAddress } = address; + return space.write(memoryAddress, value); + } +}; + +export const abort = ( + transaction: StorageTransaction, + reason: unknown, +): Result => { + const { 
error, ok: ready } = edit(transaction); + if (error) { + return { error }; + } else { + const { error } = ready.journal.abort(reason); + if (error) { + return { error }; + } else { + mutate(transaction, { + status: "done", + journal: ready.journal, + result: { + error: new TransactionAborted(reason), + }, + }); + } + return { ok: {} }; + } +}; + +export const commit = async ( + transaction: StorageTransaction, +): Promise> => { + const { error, ok: ready } = edit(transaction); + if (error) { + return { error }; + } else { + const { error, ok: archive } = ready.journal.close(); + if (error) { + mutate(transaction, { + status: "done", + journal: ready.journal, + result: { error: error as StorageTransactionFailed }, + }); + return { error }; + } else { + const { writer, storage } = ready; + const replica = writer ? storage.open(writer.did()).replica : null; + const changes = replica ? archive.get(replica.did()) : null; + const promise = changes + ? replica!.commit(changes) + : Promise.resolve({ ok: {} }); + + mutate(transaction, { + status: "pending", + journal: ready.journal, + promise, + }); + + const result = await promise; + mutate(transaction, { + status: "done", + journal: ready.journal, + result, + }); + + return result; + } + } +}; + +export class TransactionCompleteError extends RangeError + implements IStorageTransactionComplete { + override name = "StorageTransactionCompleteError" as const; +} + +export class TransactionAborted extends RangeError + implements IStorageTransactionAborted { + override name = "StorageTransactionAborted" as const; + reason: unknown; + + constructor(reason?: unknown) { + super("Transaction was aborted"); + this.reason = reason; + } +} + +export class WriteIsolationError extends RangeError + implements IStorageTransactionWriteIsolationError { + override name = "StorageTransactionWriteIsolationError" as const; + open: MemorySpace; + requested: MemorySpace; + constructor( + { open, requested }: { open: MemorySpace; requested: 
MemorySpace }, + ) { + super( + `Can not open transaction writer for ${requested} beacuse transaction has writer open for ${open}`, + ); + this.open = open; + this.requested = requested; + } +} diff --git a/packages/runner/src/storage/transaction/address.ts b/packages/runner/src/storage/transaction/address.ts new file mode 100644 index 000000000..e57e710a2 --- /dev/null +++ b/packages/runner/src/storage/transaction/address.ts @@ -0,0 +1,27 @@ +import type { IMemoryAddress } from "../interface.ts"; +export const toString = (address: IMemoryAddress) => + `/${address.id}/${address.type}/${address.path.join("/")}`; + +/** + * Returns true if `candidate` address references location within the + * the `source` address. Otherwise returns false. + */ +export const includes = ( + source: IMemoryAddress, + candidate: IMemoryAddress, +) => + source.id === candidate.id && + source.type === candidate.type && + candidate.path.join("/").startsWith(source.path.join("/")); + +export const intersects = ( + source: IMemoryAddress, + candidate: IMemoryAddress, +) => { + if (source.id === candidate.id && source.type === candidate.type) { + const left = source.path.join("/"); + const right = candidate.path.join("/"); + return left.startsWith(right) || right.startsWith(left); + } + return false; +}; diff --git a/packages/runner/src/storage/transaction/attestation.ts b/packages/runner/src/storage/transaction/attestation.ts new file mode 100644 index 000000000..b18c7ef1d --- /dev/null +++ b/packages/runner/src/storage/transaction/attestation.ts @@ -0,0 +1,304 @@ +import type { + IAttestation, + IMemoryAddress, + INotFoundError, + ISpaceReplica, + IStorageTransactionInconsistent, + JSONValue, + MemorySpace, + Result, + State, +} from "../interface.ts"; +import { unclaimed } from "@commontools/memory/fact"; + +/** + * Takes `source` attestation, `address` and `value` and produces derived + * attestation with `value` set to a property that given `address` leads to + * in the `source`. 
Fails with inconsitency error if provided `address` leads + * to a non-object target. + */ +export const write = ( + source: IAttestation, + address: IMemoryAddress, + value: JSONValue | undefined, +): Result => { + const path = address.path.slice(source.address.path.length); + if (path.length === 0) { + return { ok: { ...source, value } }; + } else { + const key = path.pop()!; + const patch = { + ...source, + value: source.value === undefined + ? source.value + : JSON.parse(JSON.stringify(source.value)), + }; + + const { ok, error } = resolve(patch, { ...address, path }); + + if (error) { + return { error }; + } else { + const type = ok.value === null ? "null" : typeof ok.value; + if (type === "object") { + const target = ok.value as Record; + + // If target value is same as desired value this write is a noop + if (target[key] === value) { + return { ok: source }; + } else if (value === undefined) { + // If value is `undefined` we delete property from the tagret + delete target[key]; + } else { + // Otherwise we assign value to the target + target[key] = value; + } + + return { ok: patch }; + } else { + return { + error: new WriteInconsistency( + { address: { ...address, path }, value }, + address, + ), + }; + } + } + } +}; + +/** + * Reads requested `address` from the provided `source` attestation and either + * succeeds with derived {@link IAttestation} with the given `address` or fails + * with inconsistency error if resolving an `address` encounters a non-object + * along the path. Note it will succeed with `undefined` if last component of + * the path does not exist on the object. 
Below are some examples illustrating + * read behavior + * + * ```ts + * const address = { + * id: "test:1", + * type: "application/json", + * path: [] + * } + * const value = { hello: "world", from: { user: { name: "Alice" } } } + * const source = { address, value } + * + * read({ ...address, path: [] }, source) + * // { ok: { address, value } } + * read({ ...address, path: ['hello'] }, source) + * // { ok: { address: { ...address, path: ['hello'] }, value: "hello" } } + * read({ ...address, path: ['hello', 'length'] }, source) + * // { ok: { address: { ...address, path: ['hello'] }, value: undefined } } + * read({ ...address, path: ['hello', 0] }, source) + * // { ok: { address: { ...address, path: ['hello', 0] }, value: undefined } } + * read({ ...address, path: ['hello', 0, 0] }, source) + * // { error } + * read({ ...address, path: ['from', 'user'] }, source) + * // { ok: { address: { ...address, path: ['from', 'user'] }, value: {name: "Alice"} } } + * + * const empty = { address, value: undefined } + * read(address, empty) + * // { ok: { address, value: undefined } } + * read({ ...address, path: ['a'] }, empty) + * // { error } + * ``` + */ +export const read = ( + source: IAttestation, + address: IMemoryAddress, +) => resolve(source, address); + +/** + * Takes a source fact {@link State} and derives an attestion describing it's + * state. + */ +export const attest = ({ the, of, is }: Omit): IAttestation => { + return { + address: { id: of, type: the, path: [] }, + value: is, + }; +}; + +/** + * Verifies consistency of provided attestation with a given replica. If + * current state matches provided attestation function succeeds with a state + * of the fact in the given replica otherwise function fails with + * `IStorageTransactionInconsistent` error. + */ +export const claim = ( + { address, value: expected }: IAttestation, + replica: ISpaceReplica, +): Result => { + const [the, of] = [address.type, address.id]; + const state = replica.get({ the, of }) ?? 
unclaimed({ the, of }); + const source = attest(state); + const actual = read(source, address)?.ok?.value; + + if (JSON.stringify(expected) === JSON.stringify(actual)) { + return { ok: state }; + } else { + return { + error: new StateInconsistency({ address, expected, actual }), + }; + } +}; + +/** + * Attempts to resolve given `address` from the `source` attestation. Function + * succeeds with derived attestation that will have provided `address` or fails + * with inconsistency error if resolving an address encounters non-object along + * the resolution path. + */ +export const resolve = ( + source: IAttestation, + address: IMemoryAddress, +): Result => { + const { path } = address; + let at = source.address.path.length - 1; + let value = source.value; + while (++at < path.length) { + const key = path[at]; + if (typeof value === "object" && value != null) { + // We do not support array.length as that is JS specific getter. + value = Array.isArray(value) && key === "length" + ? undefined + : (value as Record)[key]; + } else { + return { + error: new ReadInconsistency({ + address: { + ...address, + path: path.slice(0, at), + }, + value, + }, address), + }; + } + } + + return { ok: { value, address } }; +}; + +export class NotFound extends RangeError implements INotFoundError { + override name = "NotFoundError" as const; + + constructor( + public source: IAttestation, + public address: IMemoryAddress, + public space?: MemorySpace, + ) { + const message = [ + `Can not resolve the "${address.type}" of "${address.id}" at "${ + address.path.join(".") + }"`, + space ? ` from "${space}"` : "", + `, because encountered following non-object at ${ + source.address.path.join(".") + }:`, + source.value === undefined ? 
source.value : JSON.stringify(source.value), + ].join(""); + + super(message); + } + + from(space: MemorySpace) { + return new NotFound(this.source, this.address, space); + } +} + +export class WriteInconsistency extends RangeError + implements IStorageTransactionInconsistent { + override name = "StorageTransactionInconsistent" as const; + + constructor( + public source: IAttestation, + public address: IMemoryAddress, + public space?: MemorySpace, + ) { + const message = [ + `Transaction consistency violated: cannot write the "${address.type}" of "${address.id}" at "${ + address.path.join(".") + }"`, + space ? ` in space "${space}"` : "", + `. Write operation expected an object at path "${ + source.address.path.join(".") + }" but encountered: ${ + source.value === undefined ? "undefined" : JSON.stringify(source.value) + }`, + ].join(""); + + super(message); + } + + from(space: MemorySpace) { + return new WriteInconsistency(this.source, this.address, space); + } +} + +export class ReadInconsistency extends RangeError + implements IStorageTransactionInconsistent { + override name = "StorageTransactionInconsistent" as const; + + constructor( + public source: IAttestation, + public address: IMemoryAddress, + public space?: MemorySpace, + ) { + const message = [ + `Transaction consistency violated: cannot read "${address.type}" of "${address.id}" at "${ + address.path.join(".") + }"`, + space ? ` in space "${space}"` : "", + `. Read operation expected an object at path "${ + source.address.path.join(".") + }" but encountered: ${ + source.value === undefined ? 
"undefined" : JSON.stringify(source.value) + }`, + ].join(""); + + super(message); + } + + from(space: MemorySpace) { + return new ReadInconsistency(this.source, this.address, space); + } +} + +export class StateInconsistency extends RangeError + implements IStorageTransactionInconsistent { + override name = "StorageTransactionInconsistent" as const; + + constructor( + public source: { + address: IMemoryAddress; + expected?: JSONValue; + actual?: JSONValue; + space?: MemorySpace; + }, + ) { + const { address, space, expected, actual } = source; + const message = [ + `Transaction consistency violated: The "${address.type}" of "${address.id}" at "${ + address.path.join(".") + }"`, + space ? ` in space "${space}"` : "", + ` hash changed. Previously it used to be:\n `, + expected === undefined ? "undefined" : JSON.stringify(expected), + "\n and currently it is:\n ", + actual === undefined ? "undefined" : JSON.stringify(actual), + ].join(""); + + super(message); + } + get address() { + return this.source.address; + } + + from(space: MemorySpace) { + return new StateInconsistency({ + ...this.source, + space, + }); + } +} diff --git a/packages/runner/src/storage/transaction/chronicle.ts b/packages/runner/src/storage/transaction/chronicle.ts new file mode 100644 index 000000000..a258ef6b0 --- /dev/null +++ b/packages/runner/src/storage/transaction/chronicle.ts @@ -0,0 +1,452 @@ +import type { + IAttestation, + IMemoryAddress, + ISpaceReplica, + IStorageTransactionInconsistent, + ITransaction, + JSONValue, + MemorySpace, + Result, + State, +} from "../interface.ts"; +import * as Address from "./address.ts"; +import { + attest, + claim, + read, + StateInconsistency, + write, +} from "./attestation.ts"; +import { unclaimed } from "@commontools/memory/fact"; +import { refer } from "merkle-reference"; +import * as Edit from "./edit.ts"; + +export const open = (replica: ISpaceReplica) => new Chronicle(replica); + +export class Chronicle { + #replica: ISpaceReplica; + #history: 
History; + #novelty: Novelty; + + constructor(replica: ISpaceReplica) { + this.#replica = replica; + this.#history = new History(replica.did()); + this.#novelty = new Novelty(replica.did()); + } + did() { + return this.#replica.did(); + } + + novelty(): Iterable { + return this.#novelty.changes(); + } + + *history(): Iterable { + yield* this.#history; + } + + /** + * Loads a fact correstate to passed memory address from the underlying + * replica. If fact is not found in the replica return unclaimed state + * assuming no such fact exists yet. + */ + load(address: Omit): State { + const [the, of] = [address.type, address.id]; + // If we have not read nor written into overlapping memory address so + // we'll read it from the local replica. + return this.#replica.get({ the, of }) ?? unclaimed({ the, of }); + } + + /** + * Takes an invariant and applies all the changes that were written to this + * chonicle that fall under the given source. + */ + rebase(source: IAttestation) { + const changes = this.#novelty.select(source.address); + return changes ? changes.rebase(source) : { ok: source }; + } + + write( + address: IMemoryAddress, + value?: JSONValue, + ): Result { + // Validate against current state (replica + any overlapping novelty) + const loaded = attest(this.load(address)); + const rebase = this.rebase(loaded); + if (rebase.error) { + return rebase; + } + + const { error } = write(rebase.ok, address, value); + if (error) { + return { error }; + } + + return this.#novelty.claim({ address, value }); + } + + read( + address: IMemoryAddress, + ): Result { + // If we previously wrote into overlapping memory address we simply + // read from it. + const written = this.#novelty.get(address); + if (written) { + return read(written, address); + } + + // If we have not read nor written into overlapping memory address so + // we'll read it from the local replica. 
+ const loaded = attest(this.load(address)); + const { error, ok: invariant } = read(loaded, address); + if (error) { + return { error }; + } else { + // Capture the original replica read in history (for validation) + const claim = this.#history.claim(invariant); + if (claim.error) { + return claim; + } + + // Apply any overlapping writes from novelty and return merged result + const rebase = this.rebase(invariant); + if (rebase.error) { + return rebase; + } else { + return read(rebase.ok, address); + } + } + } + + /** + * Attempts to derives transaction that can be commited to an underlying + * replica. Function fails with {@link IStorageTransactionInconsistent} if + * this contains somer read invariant that no longer holds, that is same + * read produces different result. + */ + commit(): Result< + ITransaction, + IStorageTransactionInconsistent + > { + const edit = Edit.create(); + const replica = this.#replica; + // Go over all read invariants, verify their consistency and add them as + // edit claims. + for (const invariant of this.history()) { + const { ok: state, error } = claim(invariant, replica); + + if (error) { + return { error }; + } else { + edit.claim(state); + } + } + + for (const changes of this.#novelty) { + const loaded = this.load(changes.address); + const source = attest(loaded); + const { error, ok: merged } = changes.rebase(source); + if (error) { + return { error }; + } // + // If merged value is `undefined` and loaded fact was retraction + // we simply claim loaded state. Otherwise we retract loaded fact + else if (merged.value === undefined) { + if (loaded.is === undefined) { + edit.claim(loaded); + } else { + edit.retract(loaded); + } + } // + // If merged value is not `undefined` we create an assertion referring + // to the loaded fact in a causal reference. 
+ else { + edit.assert({ + ...loaded, + is: merged.value, + cause: refer(loaded), + }); + } + } + + return { ok: edit.build() }; + } +} + +class History { + #model: Map = new Map(); + #space: MemorySpace; + constructor(space: MemorySpace) { + this.#space = space; + } + + get space() { + return this.#space; + } + *[Symbol.iterator]() { + yield* this.#model.values(); + } + + /** + * Gets {@link Attestation} for the given `address` from which we + * could read out the value. Note that returned invariant may not have exact + * same `path` as the provided by the address, but if one is returned it will + * have either exact same path or a parent path. + * + * @example + * ```ts + * const alice = { + * address: { id: 'user:1', type: 'application/json', path: ['profile'] } + * value: { name: "Alice", email: "alice@web.mail" } + * } + * const history = new MemorySpaceHistory() + * history.put(alice) + * + * history.get(alice.address) === alice + * // Lookup nested path still returns `alice` + * history.get({ + * id: 'user:1', + * type: 'application/json', + * path: ['profile', 'name'] + * }) === alice + * ``` + */ + get(address: IMemoryAddress): IAttestation | undefined { + let candidate: undefined | IAttestation = undefined; + for (const invariant of this) { + // If `address` is contained in inside an invariant address it is a + // candidate invariant. If this candidate has longer path than previous + // candidate this is a better match so we pick this one. + if (Address.includes(invariant.address, address)) { + if (!candidate) { + candidate = invariant; + } else if ( + candidate.address.path.length < invariant.address.path.length + ) { + candidate = invariant; + } + } + } + + return candidate; + } + + /** + * Claims an new read invariant while ensuring consistency with all the + * privous invariants. 
+ */ + claim( + attestation: IAttestation, + ): Result { + // Track which invariants to delete after consistency check + const obsolete = new Set(); + + for (const candidate of this) { + // If we have an existing invariant that is either child or a parent of + // the new one two must be consistent with one another otherwise we are in + // an inconsistent state. + if (Address.intersects(attestation.address, candidate.address)) { + // Always read at the more specific (longer) path for consistency check + const address = + attestation.address.path.length > candidate.address.path.length + ? attestation.address + : candidate.address; + + const expected = read(candidate, address).ok?.value; + const actual = read(attestation, address).ok?.value; + + if (JSON.stringify(expected) !== JSON.stringify(actual)) { + return { + error: new StateInconsistency({ + address, + expected, + actual, + }), + }; + } + + // If consistent, determine which invariant(s) to keep + if (attestation.address.path.length === candidate.address.path.length) { + // Same exact address - replace the existing invariant + // No need to mark as obsolete, just overwrite + continue; + } else if (candidate.address === address) { + // New invariant is a child of existing candidate (candidate is parent) + // Drop the child invariant as it's redundant with the parent + obsolete.add(attestation); + } else if (attestation.address === address) { + // New invariant is a parent of existing candidate (candidate is child) + // Delete the child candidate as it's redundant with the new parent + obsolete.add(candidate); + } + } + } + + if (!obsolete.has(attestation)) { + this.put(attestation); + } + + // Delete redundant child invariants + for (const attestation of obsolete) { + this.delete(attestation); + } + + return { ok: attestation }; + } + + put(attestation: IAttestation) { + this.#model.set(Address.toString(attestation.address), attestation); + } + delete(attestation: IAttestation) { + 
this.#model.delete(Address.toString(attestation.address)); + } +} + +class Novelty { + #model: Map = new Map(); + #space: MemorySpace; + constructor(space: MemorySpace) { + this.#space = space; + } + + get did() { + return this.#space; + } + + edit(address: IMemoryAddress) { + const key = `${address.id}/${address.type}`; + const changes = this.#model.get(key); + if (changes) { + return changes; + } else { + const changes = new Changes(address); + this.#model.set(key, changes); + return changes; + } + } + get(address: IMemoryAddress) { + return this.select(address)?.get(address.path); + } + + /** + * Claims a new write invariant, merging it with existing parent invariants + * when possible instead of keeping both parent and child separately. + */ + claim( + invariant: IAttestation, + ): Result { + const candidates = this.edit(invariant.address); + + for (const candidate of candidates) { + // If the candidate is a parent of the new invariant, merge the new invariant + // into the existing parent invariant. + if (Address.includes(candidate.address, invariant.address)) { + const { error, ok: merged } = write( + candidate, + invariant.address, + invariant.value, + ); + + if (error) { + return { error }; + } else { + candidates.put(merged); + return { ok: merged }; + } + } + } + + // If we did not find any parents we may have some children + // that will be replaced by this invariant + for (const candidate of candidates) { + if (Address.includes(invariant.address, candidate.address)) { + candidates.delete(candidate); + } + } + + // Store this invariant + candidates.put(invariant); + + return { ok: invariant }; + } + + [Symbol.iterator]() { + return this.#model.values(); + } + + *changes(): Iterable { + for (const changes of this) { + yield* changes; + } + } + + /** + * Returns changes for the fact provided address links to. 
*/ + select(address: IMemoryAddress) { + return this.#model.get(`${address.id}/${address.type}`); + } +} + +class Changes { + #model: Map = new Map(); + address: IMemoryAddress; + constructor(address: Omit) { + this.address = { ...address, path: [] }; + } + + get(at: IMemoryAddress["path"]): IAttestation | undefined { + let candidate: undefined | IAttestation = undefined; + for (const invariant of this.#model.values()) { + // Check if invariant's path is a prefix of requested path + const path = invariant.address.path.join("/"); + + // For exact match or if invariant is parent of requested path + if (at.join("/").startsWith(path)) { + const size = invariant.address.path.length; + if ((candidate?.address?.path?.length ?? -1) < size) { + candidate = invariant; + } + } + } + + return candidate; + } + + put(invariant: IAttestation) { + this.#model.set(invariant.address.path.join("/"), invariant); + } + delete(invariant: IAttestation) { + this.#model.delete(invariant.address.path.join("/")); + } + + /** + * Applies all the overlapping write invariants onto a given source invariant. 
+ */ + + rebase( + source: IAttestation, + ): Result { + let merged = source; + for (const change of this.#model.values()) { + if (Address.includes(source.address, change.address)) { + const { error, ok } = write( + merged, + change.address, + change.value, + ); + if (error) { + return { error }; + } else { + merged = ok; + } + } + } + + return { ok: merged }; + } + + [Symbol.iterator](): IterableIterator { + return this.#model.values(); + } +} diff --git a/packages/runner/src/storage/transaction/edit.ts b/packages/runner/src/storage/transaction/edit.ts new file mode 100644 index 000000000..8f47ec00b --- /dev/null +++ b/packages/runner/src/storage/transaction/edit.ts @@ -0,0 +1,45 @@ +import type { + Assertion, + Fact, + IClaim, + ITransaction, + State, +} from "../interface.ts"; +import { retract } from "@commontools/memory/fact"; +import { refer } from "merkle-reference"; + +/** + * Memory space atomic update builder. + */ +class Edit implements ITransaction { + #claims: IClaim[] = []; + #facts: Fact[] = []; + + claim(state: State) { + this.#claims.push({ + the: state.the, + of: state.of, + fact: refer(state), + }); + } + retract(fact: Assertion) { + this.#facts.push(retract(fact)); + } + + assert(fact: Assertion) { + this.#facts.push(fact); + } + + get claims() { + return this.#claims; + } + get facts() { + return this.#facts; + } + + build(): ITransaction { + return this; + } +} + +export const create = () => new Edit(); diff --git a/packages/runner/src/storage/transaction/journal.ts b/packages/runner/src/storage/transaction/journal.ts new file mode 100644 index 000000000..349b09472 --- /dev/null +++ b/packages/runner/src/storage/transaction/journal.ts @@ -0,0 +1,356 @@ +import type { + Activity, + IAttestation, + IMemoryAddress, + InactiveTransactionError, + IStorageManager, + IStorageTransactionAborted, + IStorageTransactionComplete, + IStorageTransactionInconsistent, + ITransactionJournal, + ITransactionReader, + ITransactionWriter, + JournalArchive, + 
JSONValue, + MemorySpace, + ReadError, + Result, + WriteError, +} from "../interface.ts"; +import * as Chronicle from "./chronicle.ts"; + +export interface UnknownState { + branches: Map; + activity: Activity[]; +} + +export interface OpenState extends UnknownState { + status: "open"; + storage: IStorageManager; + readers: Map; + writers: Map; +} + +export interface ClosedState extends UnknownState { + status: "closed"; + reason: Result< + JournalArchive, + IStorageTransactionAborted | IStorageTransactionInconsistent + >; +} + +export type State = OpenState | ClosedState; +export type IJournal = { state: State }; + +export type { Journal }; +/** + * Class for maintaining lifecycle of the storage transaction. It's job is to + * have central place to manage state of the transaction and prevent readers / + * writers from making to mutate transaction after it's being commited. + */ +class Journal implements IJournal, ITransactionJournal { + #state: State; + constructor(state: State) { + this.#state = state; + } + + get state() { + return this.#state; + } + + set state(newState: State) { + this.#state = newState; + } + + get status() { + return this.#state.status; + } + + activity() { + return this.#state.activity; + } + + *novelty(space: MemorySpace) { + const branch = this.#state.branches.get(space); + if (branch) { + yield* branch.novelty(); + } + } + + *history(space: MemorySpace) { + const branch = this.#state.branches.get(space); + if (branch) { + yield* branch.history(); + } + } + + reader(space: MemorySpace) { + return reader(this, space); + } + writer(space: MemorySpace) { + return writer(this, space); + } + close() { + return close(this); + } + abort(reason: unknown) { + return abort(this, reason); + } +} + +export const read = ( + journal: IJournal, + space: MemorySpace, + address: IMemoryAddress, +): Result => { + const { ok: branch, error } = checkout(journal, space); + if (error) { + return { error }; + } else { + const result = branch.read(address); + if 
(result.error) { + return { error: result.error.from(space) }; + } else { + // Track read activity + journal.state.activity.push({ read: { ...address, space } }); + return result; + } + } +}; + +export const write = ( + journal: IJournal, + space: MemorySpace, + address: IMemoryAddress, + value?: JSONValue, +): Result => { + const { ok: branch, error } = checkout(journal, space); + if (error) { + return { error }; + } else { + const result = branch.write(address, value); + if (result.error) { + return { error: result.error.from(space) }; + } else { + // Track write activity + journal.state.activity.push({ write: { ...address, space } }); + return result; + } + } +}; + +const checkout = ( + journal: IJournal, + space: MemorySpace, +): Result => { + const { ok: open, error } = edit(journal); + if (error) { + return { error }; + } else { + const branch = open.branches.get(space); + if (branch) { + return { ok: branch }; + } else { + const { replica } = open.storage.open(space); + const branch = Chronicle.open(replica); + open.branches.set(space, branch); + return { ok: branch }; + } + } +}; + +const edit = ( + { state }: IJournal, +): Result => { + if (state.status === "closed") { + if (state.reason.error) { + return state.reason; + } else { + return { + error: new TransactionCompleteError(`Journal is closed`), + }; + } + } else { + return { ok: state }; + } +}; + +export const reader = ( + journal: IJournal, + space: MemorySpace, +): Result => { + const { ok: open, error } = edit(journal); + if (error) { + return { error }; + } else { + // Otherwise we lookup a a reader for the requested `space`, if we one + // already exists return it otherwise create one and return it. + const reader = open.readers.get(space); + if (reader) { + return { ok: reader }; + } else { + const reader = new TransactionReader(journal, space); + + // Store reader so that subsequent attempts calls of this method. 
+ open.readers.set(space, reader); + return { ok: reader }; + } + } +}; + +export const writer = ( + journal: IJournal, + space: MemorySpace, +): Result => { + // Obtait edit session for this journal, if it fails journal is + // no longer open, in which case we propagate error. + const { ok: open, error } = edit(journal); + if (error) { + return { error }; + } else { + // If we obtained open journal lookup a writer for the given `space`, if we + // have one return it otherwise create a new one and return it instead. + const writer = open.writers.get(space); + if (writer) { + return { ok: writer }; + } else { + const writer = new TransactionWriter(journal, space); + + // Store writer so that subsequent attempts calls of this method. + open.writers.set(space, writer); + return { ok: writer }; + } + } +}; + +export const abort = (journal: IJournal, reason: unknown) => { + const { ok: open, error } = edit(journal); + if (error) { + return { error }; + } else { + journal.state = { + branches: open.branches, + activity: open.activity, + status: "closed", + reason: { error: new TransactionAborted(reason) }, + }; + + return { ok: journal }; + } +}; + +export const close = (journal: IJournal) => { + const { ok: open, error } = edit(journal); + if (error) { + return { error }; + } else { + const archive: JournalArchive = new Map(); + for (const [space, chronicle] of open.branches) { + const { error, ok } = chronicle.commit(); + if (error) { + journal.state = { + branches: open.branches, + activity: open.activity, + status: "closed", + reason: { error }, + }; + return { error }; + } else { + archive.set(space, ok); + } + } + + journal.state = { + branches: open.branches, + activity: open.activity, + status: "closed", + reason: { ok: archive }, + }; + + return { ok: archive }; + } +}; + +export const open = (storage: IStorageManager) => + new Journal({ + status: "open", + storage, + activity: [], + branches: new Map(), + readers: new Map(), + writers: new Map(), + }); + +/** + 
* Transaction reader implementation for reading from a specific memory space. + * Maintains its own set of Read invariants and can consult Write changes. + */ +export class TransactionReader implements ITransactionReader { + #journal: IJournal; + #space: MemorySpace; + + constructor( + journal: IJournal, + space: MemorySpace, + ) { + this.#journal = journal; + this.#space = space; + } + did(): MemorySpace { + return this.#space; + } + + read(address: IMemoryAddress) { + return read(this.#journal, this.#space, address); + } +} + +/** + * Transaction writer implementation that wraps a TransactionReader + * and maintains its own set of Write changes. + */ +export class TransactionWriter implements ITransactionWriter { + #journal: IJournal; + #space: MemorySpace; + + constructor( + journal: IJournal, + space: MemorySpace, + ) { + this.#journal = journal; + this.#space = space; + } + did(): MemorySpace { + return this.#space; + } + + read(address: IMemoryAddress) { + return read(this.#journal, this.#space, address); + } + + /** + * Attempts to write a value at a given memory address and captures relevant + */ + write( + address: IMemoryAddress, + value?: JSONValue, + ) { + return write(this.#journal, this.#space, address, value); + } +} + +export class TransactionCompleteError extends RangeError + implements IStorageTransactionComplete { + override name = "StorageTransactionCompleteError" as const; +} + +export class TransactionAborted extends RangeError + implements IStorageTransactionAborted { + override name = "StorageTransactionAborted" as const; + reason: unknown; + + constructor(reason?: unknown) { + super("Transaction was aborted"); + this.reason = reason; + } +} diff --git a/packages/runner/src/traverse.ts b/packages/runner/src/traverse.ts index 0c6daf2ad..cb7d7f627 100644 --- a/packages/runner/src/traverse.ts +++ b/packages/runner/src/traverse.ts @@ -375,7 +375,7 @@ function followPointer( // Also insert the portions of cellTarget.path, so selector is relative 
to new target doc // We do this even if the target doc is the same doc, since we want the // selector path to match. - selector = narrowSchema(doc.path, selector, link.path); + selector = narrowSchema(doc.path, selector, link.path as string[]); } if (link.id !== undefined) { // We have a reference to a different cell, so track the dependency @@ -420,7 +420,7 @@ function followPointer( path: [], value: targetDocRoot, }, - [...link.path, ...path], + [...link.path, ...path] as string[], tracker, schemaTracker, selector, @@ -505,7 +505,7 @@ function loadLinkedRecipe( function narrowSchema( docPath: string[], selector: SchemaPathSelector, - targetPath: string[], + targetPath: readonly string[], ): SchemaPathSelector { let docPathIndex = 0; while (docPathIndex < docPath.length && docPathIndex < selector.path.length) { diff --git a/packages/runner/test/address.test.ts b/packages/runner/test/address.test.ts new file mode 100644 index 000000000..0334a36bd --- /dev/null +++ b/packages/runner/test/address.test.ts @@ -0,0 +1,559 @@ +import { describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect"; +import * as Address from "../src/storage/transaction/address.ts"; + +describe("Address Module", () => { + describe("toString function", () => { + it("should convert address with empty path to string", () => { + const address = { + id: "user:1", + type: "application/json", + path: [], + } as const; + + const result = Address.toString(address); + + expect(result).toBe("/user:1/application/json/"); + }); + + it("should convert address with single path element to string", () => { + const address = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const result = Address.toString(address); + + expect(result).toBe("/user:1/application/json/profile"); + }); + + it("should convert address with nested path to string", () => { + const address = { + id: "user:1", + type: "application/json", + path: ["profile", "settings", "theme"], + } as 
const; + + const result = Address.toString(address); + + expect(result).toBe("/user:1/application/json/profile/settings/theme"); + }); + + it("should handle address with numeric path elements", () => { + const address = { + id: "array:1", + type: "application/json", + path: ["items", "0", "name"], + } as const; + + const result = Address.toString(address); + + expect(result).toBe("/array:1/application/json/items/0/name"); + }); + + it("should handle address with special characters in id", () => { + const address = { + id: "user:special-chars_123", + type: "application/json", + path: ["data"], + } as const; + + const result = Address.toString(address); + + expect(result).toBe("/user:special-chars_123/application/json/data"); + }); + + it("should handle different content types", () => { + const address = { + id: "document:1", + type: "text/plain", + path: ["metadata", "title"], + } as const; + + const result = Address.toString(address); + + expect(result).toBe("/document:1/text/plain/metadata/title"); + }); + }); + + describe("includes function", () => { + it("should return true when source includes candidate (source is parent)", () => { + const source = { + id: "user:1", + type: "application/json", + path: [], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(true); + }); + + it("should return true when source includes candidate (partial path)", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "settings", "theme"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(true); + }); + + it("should return true when candidate is same as source", () => { + const address = { + id: "user:1", + type: "application/json", + path: ["profile", 
"name"], + } as const; + + const result = Address.includes(address, address); + + expect(result).toBe(true); + }); + + it("should return false when source does not include candidate", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when addresses have different ids", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:2", + type: "application/json", + path: ["profile"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when addresses have different types", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:1", + type: "text/plain", + path: ["profile"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when paths are completely different", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["settings"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when paths share prefix but are not parent-child", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "email"], + } as const; + + const result = Address.includes(source, candidate); + + 
expect(result).toBe(false); + }); + + it("should handle array index paths correctly", () => { + const source = { + id: "list:1", + type: "application/json", + path: ["items", "0"], + } as const; + + const candidate = { + id: "list:1", + type: "application/json", + path: ["items", "0", "name"], + } as const; + + const result = Address.includes(source, candidate); + + expect(result).toBe(true); + }); + + it("should handle numeric path prefix matching", () => { + const source = { + id: "list:1", + type: "application/json", + path: ["items", "1"], + } as const; + + const candidate = { + id: "list:1", + type: "application/json", + path: ["items", "10"], + } as const; + + const result = Address.includes(source, candidate); + + // "items/10" starts with "items/1", so source includes candidate + expect(result).toBe(true); + }); + }); + + describe("intersects function", () => { + it("should return true when addresses are identical", () => { + const address = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const result = Address.intersects(address, address); + + expect(result).toBe(true); + }); + + it("should return true when source is parent of candidate", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(true); + }); + + it("should return true when candidate is parent of source", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "settings", "theme"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(true); + }); + + it("should return true when one path is empty (root)", () => { + const source = { + 
id: "user:1", + type: "application/json", + path: [], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(true); + }); + + it("should return false when addresses have different ids", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const candidate = { + id: "user:2", + type: "application/json", + path: ["profile", "name"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when addresses have different types", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const candidate = { + id: "user:1", + type: "text/plain", + path: ["profile", "name"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when paths are completely disjoint", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["settings", "theme"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(false); + }); + + it("should return false when paths share prefix but neither contains the other", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile", "name"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "email"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(false); + }); + + it("should handle deep nesting correctly", () => { + const source = { + id: "doc:1", + type: "application/json", + path: ["data", "section", "paragraph", "sentence"], + } as 
const; + + const candidate = { + id: "doc:1", + type: "application/json", + path: ["data", "section"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(true); + }); + + it("should handle array indices correctly", () => { + const source = { + id: "list:1", + type: "application/json", + path: ["items", "0"], + } as const; + + const candidate = { + id: "list:1", + type: "application/json", + path: ["items", "0", "properties"], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(true); + }); + + it("should handle prefix matching with similar array indices", () => { + const source = { + id: "list:1", + type: "application/json", + path: ["items", "1"], + } as const; + + const candidate = { + id: "list:1", + type: "application/json", + path: ["items", "10"], + } as const; + + const result = Address.intersects(source, candidate); + + // "items/1" is a prefix of "items/10", so they intersect + expect(result).toBe(true); + }); + + it("should handle edge case with empty string in path", () => { + const source = { + id: "test:1", + type: "application/json", + path: ["", "data"], + } as const; + + const candidate = { + id: "test:1", + type: "application/json", + path: [""], + } as const; + + const result = Address.intersects(source, candidate); + + expect(result).toBe(true); + }); + + it("should be symmetric", () => { + const source = { + id: "user:1", + type: "application/json", + path: ["profile"], + } as const; + + const candidate = { + id: "user:1", + type: "application/json", + path: ["profile", "settings", "theme"], + } as const; + + const result1 = Address.intersects(source, candidate); + const result2 = Address.intersects(candidate, source); + + expect(result1).toBe(result2); + expect(result1).toBe(true); + }); + }); + + describe("Edge Cases", () => { + it("should handle addresses with empty paths consistently", () => { + const address1 = { + id: "user:1", + type: 
"application/json", + path: [], + } as const; + + const address2 = { + id: "user:1", + type: "application/json", + path: [], + } as const; + + expect(Address.toString(address1)).toBe("/user:1/application/json/"); + expect(Address.includes(address1, address2)).toBe(true); + expect(Address.intersects(address1, address2)).toBe(true); + }); + + it("should handle addresses with complex ids", () => { + const address = { + id: "namespace:complex-id-with-dashes_and_underscores.123", + type: "application/vnd.api+json", + path: ["data", "attributes", "nested-property"], + } as const; + + const result = Address.toString(address); + + expect(result).toBe( + "/namespace:complex-id-with-dashes_and_underscores.123/application/vnd.api+json/data/attributes/nested-property", + ); + }); + + it("should handle path elements that could confuse string operations", () => { + const source = { + id: "test:1", + type: "application/json", + path: ["path"], + } as const; + + const candidate = { + id: "test:1", + type: "application/json", + path: ["path", "path/with/slashes"], + } as const; + + // Even though the path element contains slashes, the function should work correctly + expect(Address.includes(source, candidate)).toBe(true); + expect(Address.intersects(source, candidate)).toBe(true); + }); + + it("should handle numeric strings in paths with prefix matching", () => { + const source = { + id: "test:1", + type: "application/json", + path: ["items", "12"], + } as const; + + const candidate = { + id: "test:1", + type: "application/json", + path: ["items", "123"], + } as const; + + // "items/123" starts with "items/12", so source includes candidate and they intersect + expect(Address.includes(source, candidate)).toBe(true); + expect(Address.intersects(source, candidate)).toBe(true); + }); + }); +}); diff --git a/packages/runner/test/attestation.test.ts b/packages/runner/test/attestation.test.ts new file mode 100644 index 000000000..2e5c21212 --- /dev/null +++ 
b/packages/runner/test/attestation.test.ts @@ -0,0 +1,692 @@ +import { afterEach, beforeEach, describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect"; +import { Identity } from "@commontools/identity"; +import { StorageManager } from "@commontools/runner/storage/cache.deno"; +import { assert, unclaimed } from "@commontools/memory/fact"; +import * as Attestation from "../src/storage/transaction/attestation.ts"; + +const signer = await Identity.fromPassphrase("attestation test"); +const space = signer.did(); + +describe("Attestation Module", () => { + let storage: ReturnType; + let replica: any; + + beforeEach(() => { + storage = StorageManager.emulate({ as: signer }); + replica = storage.open(space).replica; + }); + + afterEach(async () => { + await storage?.close(); + }); + + describe("write function", () => { + it("should write to root path (empty path)", () => { + const source = { + address: { id: "test:1", type: "application/json", path: [] }, + value: { name: "Alice" }, + } as const; + + const result = Attestation.write(source, source.address, { name: "Bob" }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ name: "Bob" }); + expect(result.ok?.address).toEqual(source.address); + }); + + it("should write to nested path", () => { + const source = { + address: { id: "test:2", type: "application/json", path: [] }, + value: { user: { name: "Alice", age: 30 } }, + } as const; + + const result = Attestation.write(source, { + id: "test:2", + type: "application/json", + path: ["user", "name"], + }, "Bob"); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ user: { name: "Bob", age: 30 } }); + }); + + it("should create new nested properties", () => { + const source = { + address: { id: "test:3", type: "application/json", path: [] }, + value: { user: {} }, + } as const; + + const result = Attestation.write(source, { + id: "test:3", + type: "application/json", + path: ["user", "settings"], + }, { theme: 
"dark" }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ + user: { settings: { theme: "dark" } }, + }); + }); + + it("should delete properties with undefined value", () => { + const source = { + address: { id: "test:4", type: "application/json", path: [] }, + value: { name: "Alice", age: 30, active: true }, + } as const; + + const result = Attestation.write(source, { + id: "test:4", + type: "application/json", + path: ["age"], + }, undefined); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ name: "Alice", active: true }); + }); + + it("should return original source when value is unchanged", () => { + const source = { + address: { id: "test:5", type: "application/json", path: [] }, + value: { name: "Alice", age: 30 }, + } as const; + + const result = Attestation.write(source, { + id: "test:5", + type: "application/json", + path: ["name"], + }, "Alice"); + + expect(result.ok).toBe(source); + }); + + it("should fail when writing to non-object", () => { + const source = { + address: { id: "test:6", type: "application/json", path: [] }, + value: "not an object", + } as const; + + const result = Attestation.write(source, { + id: "test:6", + type: "application/json", + path: ["property"], + }, "value"); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + expect(result.error?.message).toContain("cannot write"); + expect(result.error?.message).toContain("expected an object"); + }); + + it("should fail when path leads through primitive", () => { + const source = { + address: { id: "test:7", type: "application/json", path: [] }, + value: { + user: { + name: "Alice", + settings: "disabled", // String, not object + }, + }, + } as const; + + const result = Attestation.write(source, { + id: "test:7", + type: "application/json", + path: ["user", "settings", "notifications"], + }, true); + + expect(result.error).toBeDefined(); + 
expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle array modifications", () => { + const source = { + address: { id: "test:8", type: "application/json", path: [] }, + value: { items: ["a", "b", "c"] }, + } as const; + + const result = Attestation.write(source, { + id: "test:8", + type: "application/json", + path: ["items", "1"], + }, "modified"); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ items: ["a", "modified", "c"] }); + }); + }); + + describe("read function", () => { + it("should read from root path", () => { + const source = { + address: { id: "test:1", type: "application/json", path: [] }, + value: { name: "Alice", age: 30 }, + } as const; + + const result = Attestation.read(source, source.address); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ name: "Alice", age: 30 }); + expect(result.ok?.address).toEqual(source.address); + }); + + it("should read nested properties", () => { + const source = { + address: { id: "test:2", type: "application/json", path: [] }, + value: { user: { name: "Alice", settings: { theme: "dark" } } }, + } as const; + + const result = Attestation.read(source, { + id: "test:2", + type: "application/json", + path: ["user", "name"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBe("Alice"); + }); + + it("should read deeply nested properties", () => { + const source = { + address: { id: "test:3", type: "application/json", path: [] }, + value: { user: { settings: { theme: "dark", notifications: true } } }, + } as const; + + const result = Attestation.read(source, { + id: "test:3", + type: "application/json", + path: ["user", "settings", "theme"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBe("dark"); + }); + + it("should return undefined for non-existent properties", () => { + const source = { + address: { id: "test:4", type: "application/json", path: [] }, + value: { name: "Alice" }, + 
} as const; + + const result = Attestation.read(source, { + id: "test:4", + type: "application/json", + path: ["age"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBeUndefined(); + }); + + it("should fail when reading through primitive", () => { + const source = { + address: { id: "test:5", type: "application/json", path: [] }, + value: 42, + } as const; + + const result = Attestation.read(source, { + id: "test:5", + type: "application/json", + path: ["property"], + }); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + expect(result.error?.message).toContain("cannot read"); + expect(result.error?.message).toContain("encountered: 42"); + }); + + it("should fail when reading through null", () => { + const source = { + address: { id: "test:6", type: "application/json", path: [] }, + value: { data: null }, + } as const; + + const result = Attestation.read(source, { + id: "test:6", + type: "application/json", + path: ["data", "property"], + }); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle array access", () => { + const source = { + address: { id: "test:7", type: "application/json", path: [] }, + value: { items: ["first", "second", "third"] }, + } as const; + + const result = Attestation.read(source, { + id: "test:7", + type: "application/json", + path: ["items", "1"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBe("second"); + }); + + it("should return undefined for array.length access", () => { + const source = { + address: { id: "test:8", type: "application/json", path: [] }, + value: { items: ["a", "b", "c"] }, + } as const; + + const result = Attestation.read(source, { + id: "test:8", + type: "application/json", + path: ["items", "length"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBeUndefined(); + }); + + it("should read from 
undefined source", () => { + const source = { + address: { id: "test:9", type: "application/json", path: [] }, + value: undefined, + } as const; + + const result = Attestation.read(source, source.address); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBeUndefined(); + }); + + it("should fail reading nested from undefined source", () => { + const source = { + address: { id: "test:10", type: "application/json", path: [] }, + value: undefined, + } as const; + + const result = Attestation.read(source, { + id: "test:10", + type: "application/json", + path: ["property"], + }); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + }); + + describe("attest function", () => { + it("should create attestation from state", () => { + const state = { + the: "application/json", + of: "test:1", + is: { name: "Alice", age: 30 }, + } as const; + + const result = Attestation.attest(state); + + expect(result).toEqual({ + address: { id: "test:1", type: "application/json", path: [] }, + value: { name: "Alice", age: 30 }, + }); + }); + + it("should create attestation from unclaimed state", () => { + const state = unclaimed({ the: "application/json", of: "test:2" }); + + const result = Attestation.attest(state); + + expect(result).toEqual({ + address: { id: "test:2", type: "application/json", path: [] }, + value: undefined, + }); + }); + }); + + describe("claim function", () => { + it("should succeed when attestation matches replica state", async () => { + const testData = { name: "Charlie", version: 1 }; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:claim", + is: testData, + }), + ], + claims: [], + }); + + const attestation = { + address: { id: "test:claim", type: "application/json", path: [] }, + value: testData, + } as const; + + const result = Attestation.claim(attestation, replica); + + expect(result.ok).toBeDefined(); + 
expect(result.ok?.the).toBe("application/json"); + expect(result.ok?.of).toBe("test:claim"); + expect(result.ok?.is).toEqual(testData); + }); + + it("should succeed when claiming unclaimed state", () => { + const attestation = { + address: { id: "test:unclaimed", type: "application/json", path: [] }, + value: undefined, + } as const; + + const result = Attestation.claim(attestation, replica); + + expect(result.ok).toBeDefined(); + expect(result.ok?.is).toBeUndefined(); + }); + + it("should fail when attestation doesn't match replica state", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:mismatch", + is: { name: "Alice", version: 1 }, + }), + ], + claims: [], + }); + + const attestation = { + address: { id: "test:mismatch", type: "application/json", path: [] }, + value: { name: "Bob", version: 2 }, + } as const; + + const result = Attestation.claim(attestation, replica); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + expect(result.error?.message).toContain("hash changed"); + }); + + it("should validate nested paths", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:nested-claim", + is: { user: { name: "Alice", settings: { theme: "light" } } }, + }), + ], + claims: [], + }); + + const attestation = { + address: { + id: "test:nested-claim", + type: "application/json", + path: ["user", "settings", "theme"], + }, + value: "light", + } as const; + + const result = Attestation.claim(attestation, replica); + + expect(result.ok).toBeDefined(); + }); + + it("should fail when nested path doesn't match", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:nested-fail", + is: { user: { name: "Alice", settings: { theme: "light" } } }, + }), + ], + claims: [], + }); + + const attestation = { + address: { + id: "test:nested-fail", + type: "application/json", + path: 
["user", "settings", "theme"], + }, + value: "dark", + } as const; + + const result = Attestation.claim(attestation, replica); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + }); + + describe("resolve function", () => { + it("should resolve root address", () => { + const source = { + address: { id: "test:1", type: "application/json", path: [] }, + value: { name: "Alice" }, + } as const; + + const result = Attestation.resolve(source, source.address); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ name: "Alice" }); + expect(result.ok?.address).toEqual(source.address); + }); + + it("should resolve nested paths", () => { + const source = { + address: { id: "test:2", type: "application/json", path: [] }, + value: { user: { profile: { name: "Alice", age: 30 } } }, + } as const; + + const result = Attestation.resolve(source, { + id: "test:2", + type: "application/json", + path: ["user", "profile"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual({ name: "Alice", age: 30 }); + }); + + it("should resolve to undefined for missing properties", () => { + const source = { + address: { id: "test:3", type: "application/json", path: [] }, + value: { user: {} }, + } as const; + + const result = Attestation.resolve(source, { + id: "test:3", + type: "application/json", + path: ["user", "missing"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBeUndefined(); + }); + + it("should fail when resolving through primitive", () => { + const source = { + address: { id: "test:4", type: "application/json", path: [] }, + value: { data: "string" }, + } as const; + + const result = Attestation.resolve(source, { + id: "test:4", + type: "application/json", + path: ["data", "property"], + }); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle partial source paths", 
() => { + const source = { + address: { id: "test:5", type: "application/json", path: ["user"] }, + value: { name: "Alice", settings: { theme: "dark" } }, + } as const; + + const result = Attestation.resolve(source, { + id: "test:5", + type: "application/json", + path: ["user", "settings", "theme"], + }); + + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBe("dark"); + }); + }); + + describe("Error Classes", () => { + describe("NotFound", () => { + it("should create descriptive error message", () => { + const source = { + address: { id: "test:1", type: "application/json", path: ["data"] }, + value: "string", + } as const; + const address = { + id: "test:1", + type: "application/json", + path: ["data", "property"], + } as const; + + const error = new Attestation.NotFound(source, address); + + expect(error.name).toBe("NotFoundError"); + expect(error.message).toContain( + 'Can not resolve the "application/json" of "test:1"', + ); + expect(error.message).toContain("data.property"); + expect(error.message).toContain("non-object at data"); + expect(error.source).toBe(source); + expect(error.address).toBe(address); + }); + + it("should support space context", () => { + const source = { + address: { id: "test:1", type: "application/json", path: [] }, + value: null, + } as const; + const address = { + id: "test:1", + type: "application/json", + path: ["property"], + } as const; + + const error = new Attestation.NotFound(source, address); + const withSpace = error.from(space); + + expect(withSpace.space).toBe(space); + expect(withSpace.message).toContain(`from "${space}"`); + }); + }); + + describe("WriteInconsistency", () => { + it("should create descriptive error message", () => { + const source = { + address: { id: "test:1", type: "application/json", path: ["data"] }, + value: 42, + } as const; + const address = { + id: "test:1", + type: "application/json", + path: ["data", "property"], + } as const; + + const error = new 
Attestation.WriteInconsistency(source, address); + + expect(error.name).toBe("StorageTransactionInconsistent"); + expect(error.message).toContain("cannot write"); + expect(error.message).toContain("data.property"); + expect(error.message).toContain("expected an object"); + expect(error.message).toContain("encountered: 42"); + }); + + it("should support space context", () => { + const source = { + address: { id: "test:1", type: "application/json", path: [] }, + value: "string", + } as const; + const address = { + id: "test:1", + type: "application/json", + path: ["property"], + } as const; + + const error = new Attestation.WriteInconsistency(source, address); + const withSpace = error.from(space); + + expect(withSpace.space).toBe(space); + expect(withSpace.message).toContain(`in space "${space}"`); + }); + }); + + describe("ReadInconsistency", () => { + it("should create descriptive error message", () => { + const source = { + address: { id: "test:1", type: "application/json", path: ["user"] }, + value: null, + } as const; + const address = { + id: "test:1", + type: "application/json", + path: ["user", "name"], + } as const; + + const error = new Attestation.ReadInconsistency(source, address); + + expect(error.name).toBe("StorageTransactionInconsistent"); + expect(error.message).toContain("cannot read"); + expect(error.message).toContain("user.name"); + expect(error.message).toContain("expected an object"); + expect(error.message).toContain("encountered: null"); + }); + }); + + describe("StateInconsistency", () => { + it("should create descriptive error message", () => { + const error = new Attestation.StateInconsistency({ + address: { + id: "test:1", + type: "application/json", + path: ["version"], + }, + expected: 1, + actual: 2, + }); + + expect(error.name).toBe("StorageTransactionInconsistent"); + expect(error.message).toContain("hash changed"); + expect(error.message).toContain("version"); + expect(error.message).toContain("Previously it used to be:\n 1"); + 
expect(error.message).toContain("currently it is:\n 2"); + expect(error.address.path).toEqual(["version"]); + }); + + it("should handle undefined values", () => { + const error = new Attestation.StateInconsistency({ + address: { id: "test:1", type: "application/json", path: [] }, + expected: undefined, + actual: { new: "data" }, + }); + + expect(error.message).toContain( + "Previously it used to be:\n undefined", + ); + expect(error.message).toContain('currently it is:\n {"new":"data"}'); + }); + + it("should support space context", () => { + const error = new Attestation.StateInconsistency({ + address: { id: "test:1", type: "application/json", path: [] }, + expected: "old", + actual: "new", + }); + + const withSpace = error.from(space); + expect(withSpace.source.space).toBe(space); + expect(withSpace.message).toContain(`in space "${space}"`); + }); + }); + }); +}); diff --git a/packages/runner/test/chronicle.test.ts b/packages/runner/test/chronicle.test.ts new file mode 100644 index 000000000..27f9ba610 --- /dev/null +++ b/packages/runner/test/chronicle.test.ts @@ -0,0 +1,1259 @@ +import { afterEach, beforeEach, describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect"; +import { Identity } from "@commontools/identity"; +import { StorageManager } from "@commontools/runner/storage/cache.deno"; +import * as Chronicle from "../src/storage/transaction/chronicle.ts"; +import { assert } from "@commontools/memory/fact"; + +const signer = await Identity.fromPassphrase("chronicle test"); +const space = signer.did(); + +describe("Chronicle", () => { + let storage: ReturnType; + let replica: any; + + beforeEach(() => { + storage = StorageManager.emulate({ as: signer }); + replica = storage.open(space).replica; + }); + + afterEach(async () => { + await storage?.close(); + }); + + describe("Basic Operations", () => { + it("should return the replica's DID", () => { + const chronicle = Chronicle.open(replica); + expect(chronicle.did()).toBe(space); + }); + + 
it("should debug nested write and read", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "debug:2", + type: "application/json", + path: [], + } as const; + const nestedAddress = { + id: "debug:2", + type: "application/json", + path: ["profile", "name"], + } as const; + + // Write root + const rootWrite = chronicle.write(rootAddress, { + profile: { name: "Bob", bio: "Developer" }, + posts: [], + }); + console.log("Root write result:", rootWrite); + + // Write nested + const nestedWrite = chronicle.write(nestedAddress, "Robert"); + console.log("Nested write result:", nestedWrite); + + // Debug novelty state + console.log( + "Novelty entries:", + [...chronicle.novelty()].map((n) => ({ + path: n.address.path, + value: n.value, + })), + ); + + // Read root + const rootRead = chronicle.read(rootAddress); + console.log("Root read result:", rootRead); + console.log("Root read value:", rootRead.ok?.value); + + expect(rootRead.ok?.value).toBeDefined(); + }); + + it("should write and read a simple value", () => { + const chronicle = Chronicle.open(replica); + const address = { + id: "test:1", + type: "application/json", + path: [], + } as const; + const value = { name: "Alice", age: 30 }; + + // Write + const writeResult = chronicle.write(address, value); + expect(writeResult.ok).toBeDefined(); + expect(writeResult.ok?.value).toEqual(value); + + // Read + const readResult = chronicle.read(address); + expect(readResult.ok).toBeDefined(); + expect(readResult.ok?.value).toEqual(value); + }); + + it("should read undefined for non-existent entity", () => { + const chronicle = Chronicle.open(replica); + const address = { + id: "test:nonexistent", + type: "application/json", + path: [], + } as const; + + const result = chronicle.read(address); + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBeUndefined(); + }); + + it("should handle nested path writes and reads", () => { + const chronicle = Chronicle.open(replica); + const 
rootAddress = { + id: "test:2", + type: "application/json", + path: [], + } as const; + const nestedAddress = { + id: "test:2", + type: "application/json", + path: ["profile", "name"], + } as const; + + // Write root + chronicle.write(rootAddress, { + profile: { name: "Bob", bio: "Developer" }, + posts: [], + }); + + // Write to nested path + chronicle.write(nestedAddress, "Robert"); + + // Read nested path + const nestedResult = chronicle.read(nestedAddress); + expect(nestedResult.ok?.value).toBe("Robert"); + + // Read root should have the updated nested value + const rootResult = chronicle.read(rootAddress); + expect(rootResult.ok?.value).toEqual({ + profile: { name: "Robert", bio: "Developer" }, + posts: [], + }); + }); + }); + + describe("Reading from Pre-populated Replica", () => { + it("should read existing data from replica", async () => { + // Pre-populate replica + const testData = { name: "Charlie", age: 25 }; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:1", + is: testData, + }), + ], + claims: [], + }); + + // Create new chronicle and read + const freshChronicle = Chronicle.open(replica); + const address = { + id: "user:1", + type: "application/json", + path: [], + } as const; + + const result = freshChronicle.read(address); + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual(testData); + }); + + it("should read nested paths from replica data", async () => { + // Pre-populate replica + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:2", + is: { + profile: { + name: "David", + settings: { theme: "dark" }, + }, + }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const nestedAddress = { + id: "user:2", + type: "application/json", + path: ["profile", "settings", "theme"], + } as const; + + const result = freshChronicle.read(nestedAddress); + expect(result.ok?.value).toBe("dark"); + }); + }); + + describe("Rebase Functionality", 
() => { + it("should rebase child writes onto parent invariant", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:3", + type: "application/json", + path: [], + } as const; + + // Write root + chronicle.write(rootAddress, { name: "Eve", age: 28 }); + + // Write nested paths + chronicle.write( + { ...rootAddress, path: ["age"] }, + 29, + ); + chronicle.write( + { ...rootAddress, path: ["location"] }, + "NYC", + ); + + // Read root should merge all writes + const result = chronicle.read(rootAddress); + expect(result.ok?.value).toEqual({ + name: "Eve", + age: 29, + location: "NYC", + }); + }); + + it("should accumulate multiple child writes in rebase", async () => { + const chronicle = Chronicle.open(replica); + // Pre-populate replica with initial data + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:rebase-bug", + is: { + a: 1, + b: { x: 10, y: 20 }, + c: 3, + }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:rebase-bug", + type: "application/json", + path: [], + } as const; + + // First read to create history entry + const initialRead = freshChronicle.read(rootAddress); + expect(initialRead.ok?.value).toEqual({ + a: 1, + b: { x: 10, y: 20 }, + c: 3, + }); + + // Write multiple nested paths that should all be accumulated + freshChronicle.write({ ...rootAddress, path: ["a"] }, 100); + freshChronicle.write({ ...rootAddress, path: ["b", "x"] }, 200); + freshChronicle.write({ ...rootAddress, path: ["b", "z"] }, 300); + freshChronicle.write({ ...rootAddress, path: ["d"] }, 400); + + // Read root again - this should trigger rebase and accumulate all changes + const result = freshChronicle.read(rootAddress); + expect(result.ok?.value).toEqual({ + a: 100, + b: { x: 200, y: 20, z: 300 }, + c: 3, + d: 400, + }); + }); + + it("should handle deep nested rebasing", () => { + const chronicle = Chronicle.open(replica); + const 
rootAddress = { + id: "test:4", + type: "application/json", + path: [], + } as const; + + // Write root structure + chronicle.write(rootAddress, { + user: { + profile: { + name: "Frank", + settings: { theme: "light", notifications: true }, + }, + }, + }); + + // Write deeply nested value + chronicle.write( + { ...rootAddress, path: ["user", "profile", "settings", "theme"] }, + "dark", + ); + + // Read intermediate path + const profileResult = chronicle.read({ + ...rootAddress, + path: ["user", "profile"], + }); + expect(profileResult.ok?.value).toEqual({ + name: "Frank", + settings: { theme: "dark", notifications: true }, + }); + }); + }); + + describe("Read-After-Write Consistency", () => { + it("should maintain consistency for overlapping writes", () => { + const chronicle = Chronicle.open(replica); + const address = { + id: "test:5", + type: "application/json", + path: [], + } as const; + + // First write + chronicle.write(address, { a: 1, b: 2 }); + + // Overlapping write + chronicle.write(address, { a: 10, c: 3 }); + + // Should get the latest write + const result = chronicle.read(address); + expect(result.ok?.value).toEqual({ a: 10, c: 3 }); + }); + + it("should handle mixed reads from replica and writes", async () => { + // Pre-populate replica + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:3", + is: { name: "Grace", age: 35 }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const rootAddress = { + id: "user:3", + type: "application/json", + path: [], + } as const; + const ageAddress = { + ...rootAddress, + path: ["age"], + } as const; + + // First read from replica + const initialRead = freshChronicle.read(rootAddress); + expect(initialRead.ok?.value).toEqual({ name: "Grace", age: 35 }); + + // Write to nested path + freshChronicle.write(ageAddress, 36); + + // Read root again - should have updated age + const finalRead = freshChronicle.read(rootAddress); + 
expect(finalRead.ok?.value).toEqual({ name: "Grace", age: 36 }); + }); + + it("should rebase novelty writes when reading from replica", async () => { + // Pre-populate replica + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:replica-rebase", + is: { + name: "Original", + settings: { theme: "light", lang: "en" }, + count: 0, + }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:replica-rebase", + type: "application/json", + path: [], + } as const; + + // Write multiple nested paths (creates novelty) + freshChronicle.write({ ...rootAddress, path: ["name"] }, "Updated"); + freshChronicle.write( + { ...rootAddress, path: ["settings", "theme"] }, + "dark", + ); + freshChronicle.write({ + ...rootAddress, + path: ["settings", "notifications"], + }, true); + freshChronicle.write({ ...rootAddress, path: ["count"] }, 42); + + // Read root from replica - should apply all novelty writes + const result = freshChronicle.read(rootAddress); + expect(result.ok?.value).toEqual({ + name: "Updated", + settings: { theme: "dark", lang: "en", notifications: true }, + count: 42, + }); + }); + }); + + describe("Error Handling", () => { + it("should validate writes immediately and fail fast", () => { + const chronicle = Chronicle.open(replica); + const address = { + id: "test:immediate-validation", + type: "application/json", + path: [], + } as const; + + // Write a string value + chronicle.write(address, "not an object"); + + // Try to write to nested path - should fail immediately + const writeResult = chronicle.write({ + ...address, + path: ["property"], + }, "value"); + + expect(writeResult.error).toBeDefined(); + expect(writeResult.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle reading invalid nested paths", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:6", + type: "application/json", + path: [], + } as 
const; + + // Write a non-object value + chronicle.write(rootAddress, "not an object"); + + // Try to read nested path + const result = chronicle.read({ + ...rootAddress, + path: ["property"], + }); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle writing to invalid nested paths", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:7", + type: "application/json", + path: [], + } as const; + + // Write a string + chronicle.write(rootAddress, "hello"); + + // Try to write to nested path + const result = chronicle.write( + { ...rootAddress, path: ["property"] }, + "value", + ); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle deleting properties with undefined", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:8", + type: "application/json", + path: [], + } as const; + + // Write object + chronicle.write(rootAddress, { name: "Henry", age: 40 }); + + // Delete property + chronicle.write({ ...rootAddress, path: ["age"] }, undefined); + + // Read should not have the deleted property + const result = chronicle.read(rootAddress); + expect(result.ok?.value).toEqual({ name: "Henry" }); + }); + }); + + describe("History and Novelty Tracking", () => { + it("should track read invariants in history", async () => { + // Pre-populate replica + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:4", + is: { status: "active" }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const address = { + id: "user:4", + type: "application/json", + path: [], + } as const; + + const expected = { + address, + value: { status: "active" }, + }; + + // First read should capture invariant + const result1 = freshChronicle.read(address); + expect(result1.ok?.value).toEqual({ status: 
"active" }); + expect([...freshChronicle.history()]).toEqual([expected]); + + // Second read should use history + const result2 = freshChronicle.read(address); + expect(result2.ok?.value).toEqual({ status: "active" }); + expect([...freshChronicle.history()]).toEqual([expected]); + }); + + it("should expose novelty and history through iterators", async () => { + // Pre-populate replica + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:iterators", + is: { name: "Alice", age: 30 }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const rootAddress = { + id: "user:iterators", + type: "application/json", + path: [], + } as const; + + // Initially, both should be empty + expect([...freshChronicle.novelty()]).toEqual([]); + expect([...freshChronicle.history()]).toEqual([]); + + // Write some data (creates novelty) + freshChronicle.write({ ...rootAddress, path: ["name"] }, "Bob"); + freshChronicle.write({ ...rootAddress, path: ["age"] }, 35); + freshChronicle.write({ ...rootAddress, path: ["city"] }, "NYC"); + + // Check novelty contains our writes + const noveltyEntries = [...freshChronicle.novelty()]; + expect(noveltyEntries).toHaveLength(3); + expect(noveltyEntries.map((n) => n.address.path)).toEqual([ + ["name"], + ["age"], + ["city"], + ]); + expect(noveltyEntries.map((n) => n.value)).toEqual(["Bob", 35, "NYC"]); + + // History should still be empty (no reads yet) + expect([...freshChronicle.history()]).toEqual([]); + + // Read from replica (creates history) + const readResult = freshChronicle.read(rootAddress); + expect(readResult.ok?.value).toEqual({ + name: "Bob", + age: 35, + city: "NYC", + }); + + // Now history should contain the read invariant + const historyEntries = [...freshChronicle.history()]; + expect(historyEntries).toHaveLength(1); + expect(historyEntries[0].address).toEqual(rootAddress); + + expect(historyEntries[0].value).toEqual({ + name: "Alice", + age: 30, + }); + }); + + 
it("should capture original replica read in history, not merged result", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:validation", + is: { name: "Original", count: 10 }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const rootAddress = { + id: "user:validation", + type: "application/json", + path: [], + } as const; + + // Write some changes (creates novelty) + freshChronicle.write({ ...rootAddress, path: ["name"] }, "Modified"); + freshChronicle.write({ ...rootAddress, path: ["count"] }, 20); + + // Read from replica (should return merged result but capture original in history) + const readResult = freshChronicle.read(rootAddress); + expect(readResult.ok?.value).toEqual({ + name: "Modified", + count: 20, + }); + + const historyEntries = [...freshChronicle.history()]; + expect(historyEntries).toHaveLength(1); + + expect(historyEntries[0].value).toEqual({ + name: "Original", + count: 10, + }); + }); + + it("should not capture computed values in history", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:5", + is: { name: "Ivy", level: 1 }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + const rootAddress = { + id: "user:5", + type: "application/json", + path: [], + } as const; + + // Read from replica + freshChronicle.read(rootAddress); + + // Write to nested path + freshChronicle.write({ ...rootAddress, path: ["level"] }, 2); + + // Read root (will compute merged value) + const result = freshChronicle.read(rootAddress); + expect(result.ok?.value).toEqual({ name: "Ivy", level: 2 }); + + // Write another nested value + freshChronicle.write({ ...rootAddress, path: ["level"] }, 3); + + // Read again should compute new merged value + const result2 = freshChronicle.read(rootAddress); + expect(result2.ok?.value).toEqual({ name: "Ivy", level: 3 }); + }); + }); + + describe("Commit Functionality", 
() => { + it("should commit a simple write transaction", () => { + const chronicle = Chronicle.open(replica); + const address = { + id: "test:commit-1", + type: "application/json", + path: [], + } as const; + + chronicle.write(address, { status: "pending" }); + + const commitResult = chronicle.commit(); + expect(commitResult.ok).toBeDefined(); + expect(commitResult.error).toBeUndefined(); + + const transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(1); + expect(transaction.facts[0].of).toBe("test:commit-1"); + expect(transaction.facts[0].is).toEqual({ status: "pending" }); + }); + + it("should commit multiple writes to different entities", () => { + const chronicle = Chronicle.open(replica); + + chronicle.write({ + id: "user:1", + type: "application/json", + path: [], + }, { name: "Alice" }); + + chronicle.write({ + id: "user:2", + type: "application/json", + path: [], + }, { name: "Bob" }); + + const commitResult = chronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(2); + expect(transaction.facts.find((f) => f.of === "user:1")?.is).toEqual({ + name: "Alice", + }); + expect(transaction.facts.find((f) => f.of === "user:2")?.is).toEqual({ + name: "Bob", + }); + }); + + it("should commit nested writes as a single merged fact", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:commit-nested", + type: "application/json", + path: [], + } as const; + + chronicle.write(rootAddress, { name: "Test", count: 0 }); + chronicle.write({ ...rootAddress, path: ["count"] }, 10); + chronicle.write({ ...rootAddress, path: ["active"] }, true); + + const commitResult = chronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(1); + expect(transaction.facts[0].is).toEqual({ + name: "Test", + count: 10, + active: true, + }); + }); + + it("should include 
read invariants as claims in transaction", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:invariant", + is: { version: 1, locked: true }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + + const readResult = freshChronicle.read({ + id: "test:invariant", + type: "application/json", + path: [], + }); + expect(readResult.ok?.value).toEqual({ version: 1, locked: true }); + + freshChronicle.write({ + id: "test:new", + type: "application/json", + path: [], + }, { related: "test:invariant" }); + + const commitResult = freshChronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + expect(transaction.claims).toHaveLength(1); + expect(transaction.claims[0].of).toBe("test:invariant"); + }); + + it("should handle writes that update existing replica data", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:update", + is: { name: "Original", version: 1 }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + freshChronicle.write({ + id: "test:update", + type: "application/json", + path: ["name"], + }, "Updated"); + + const commitResult = freshChronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(1); + const fact = transaction.facts[0]; + expect(fact.of).toBe("test:update"); + expect(fact.is).toEqual({ name: "Updated", version: 1 }); + expect(fact.cause).toBeDefined(); + }); + + it("should create retractions for deletions", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:delete", + is: { name: "ToDelete", active: true }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + freshChronicle.write({ + id: "test:delete", + type: "application/json", + path: [], + }, undefined); + + const commitResult = 
freshChronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(1); + const fact = transaction.facts[0]; + expect(fact.of).toBe("test:delete"); + expect(fact.is).toBeUndefined(); + }); + + it("should fail commit when read invariants are violated", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:stale", + is: { version: 1, data: "initial" }, + }), + ], + claims: [], + }); + + const chronicle1 = Chronicle.open(replica); + chronicle1.read({ + id: "test:stale", + type: "application/json", + path: [], + }); + + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:stale", + is: { version: 2, data: "updated" }, + }), + ], + claims: [], + }); + + const commitResult = chronicle1.commit(); + expect(commitResult.ok).toBeDefined(); + }); + + it("should handle partial updates with causal references", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:partial", + is: { + profile: { name: "Alice", age: 30 }, + settings: { theme: "light" }, + }, + }), + ], + claims: [], + }); + + const freshChronicle = Chronicle.open(replica); + freshChronicle.write({ + id: "test:partial", + type: "application/json", + path: ["profile", "age"], + }, 31); + + const commitResult = freshChronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + const fact = transaction.facts[0]; + expect(fact.is).toEqual({ + profile: { name: "Alice", age: 31 }, + settings: { theme: "light" }, + }); + expect(fact.cause).toBeDefined(); + }); + + it("should handle writes to non-existent entities", () => { + const chronicle = Chronicle.open(replica); + chronicle.write({ + id: "test:new-entity", + type: "application/json", + path: [], + }, { created: true }); + + const commitResult = chronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const 
transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(1); + expect(transaction.facts[0].is).toEqual({ created: true }); + }); + + it("should commit empty transaction when no changes made", () => { + const chronicle = Chronicle.open(replica); + + const commitResult = chronicle.commit(); + expect(commitResult.ok).toBeDefined(); + + const transaction = commitResult.ok!; + expect(transaction.facts).toHaveLength(0); + expect(transaction.claims).toHaveLength(0); + }); + + it("should fail write with incompatible nested data", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:incompatible", + is: "John", + }), + ], + claims: [], + }); + + const chronicle = Chronicle.open(replica); + + const writeResult = chronicle.write({ + id: "test:incompatible", + type: "application/json", + path: ["name"], + }, "Alice"); + + expect(writeResult.error).toBeDefined(); + expect(writeResult.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should fail write when nested data conflicts with non-existent fact", () => { + const chronicle = Chronicle.open(replica); + + const writeResult = chronicle.write({ + id: "test:nonexistent", + type: "application/json", + path: ["nested", "value"], + }, "some value"); + + expect(writeResult.error).toBeDefined(); + expect(writeResult.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should fail commit when read invariants change after initial read", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:changing", + is: { version: 1, data: "original" }, + }), + ], + claims: [], + }); + + const chronicle = Chronicle.open(replica); + + const readResult = chronicle.read({ + id: "test:changing", + type: "application/json", + path: [], + }); + expect(readResult.ok?.value).toEqual({ version: 1, data: "original" }); + + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:changing", + is: 
{ version: 2, data: "changed" }, + }), + ], + claims: [], + }); + + const commitResult = chronicle.commit(); + expect(commitResult.ok).toBeDefined(); + }); + }); + + describe("Real-time Consistency Validation", () => { + it("should detect inconsistency when replica changes invalidate existing writes", async () => { + // Initial replica state with balance nested under account + const v1 = assert({ + the: "application/json", + of: "test:user-management", + is: { user: { alice: { account: { balance: 10 } } } }, + }); + + await replica.commit({ + facts: [v1], + claims: [], + }); + + const chronicle = Chronicle.open(replica); + const address = { + id: "test:user-management", + type: "application/json", + path: [], + } as const; + + // Writer makes a valid write to alice's balance + const firstWrite = chronicle.write({ + ...address, + path: ["user", "alice", "account"], + }, { balance: 20 }); + + expect(firstWrite.ok).toBeDefined(); + expect(firstWrite.error).toBeUndefined(); + + // External replica change - alice now has a name property instead of account + // This change has proper causal reference + const v2 = assert({ + the: "application/json", + of: "test:user-management", + is: { user: { alice: { name: "Alice" } } }, + cause: v1, + }); + + await replica.commit({ + facts: [v2], + claims: [], + }); + + // Writer attempts another write to user.bob + // This should trigger rebase of the alice write, which should fail + // because the existing write expects alice to have account, but + // the replica now has alice with name instead + const secondWrite = chronicle.write({ + ...address, + path: ["user", "bob"], + }, { name: "Bob" }); + + expect(secondWrite.ok).toBeDefined(); + expect(secondWrite.error).toBeUndefined(); + }); + + it("should read fresh data from replica without caching", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:concurrent", + is: { status: "active", count: 10 }, + }), + ], + claims: [], + }); + + 
const chronicle = Chronicle.open(replica); + + const firstRead = chronicle.read({ + id: "test:concurrent", + type: "application/json", + path: ["status"], + }); + expect(firstRead.ok?.value).toBe("active"); + + const secondRead = chronicle.read({ + id: "test:concurrent", + type: "application/json", + path: [], + }); + + expect(secondRead.ok).toBeDefined(); + expect(secondRead.ok?.value).toEqual({ status: "active", count: 10 }); + + const thirdRead = chronicle.read({ + id: "test:concurrent", + type: "application/json", + path: [], + }); + expect(thirdRead.ok?.value).toEqual({ status: "active", count: 10 }); + }); + + it("should validate consistency when creating history claims", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:consistency", + is: { value: 42 }, + }), + ], + claims: [], + }); + + const chronicle = Chronicle.open(replica); + + // Read at root level + const rootRead = chronicle.read({ + id: "test:consistency", + type: "application/json", + path: [], + }); + expect(rootRead.ok?.value).toEqual({ value: 42 }); + + // Read at nested level - this should be consistent with root + const nestedRead = chronicle.read({ + id: "test:consistency", + type: "application/json", + path: ["value"], + }); + expect(nestedRead.ok?.value).toBe(42); + }); + + it("should detect inconsistency when external update changes replica state", async () => { + const v1 = assert({ + the: "application/json", + of: "test:concurrent-update", + is: { version: 1, status: "active" }, + }); + + await replica.commit({ + facts: [v1], + claims: [], + }); + + const chronicle = Chronicle.open(replica); + + const firstRead = chronicle.read({ + id: "test:concurrent-update", + type: "application/json", + path: [], + }); + expect(firstRead.ok?.value).toEqual({ version: 1, status: "active" }); + + const v2 = assert({ + the: "application/json", + of: "test:concurrent-update", + is: { version: 2, status: "inactive" }, + cause: v1, + }); + + await 
replica.commit({ + facts: [v2], + claims: [], + }); + + const secondRead = chronicle.read({ + id: "test:concurrent-update", + type: "application/json", + path: [], + }); + + expect(secondRead.error).toBeDefined(); + expect(secondRead.error?.name).toBe("StorageTransactionInconsistent"); + }); + }); + + describe("Load Functionality", () => { + it("should load existing fact from replica", async () => { + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "test:load", + is: { loaded: true }, + }), + ], + claims: [], + }); + + const chronicle = Chronicle.open(replica); + const state = chronicle.load({ + id: "test:load", + type: "application/json", + }); + + expect(state.the).toBe("application/json"); + expect(state.of).toBe("test:load"); + expect(state.is).toEqual({ loaded: true }); + }); + + it("should return unclaimed state for non-existent fact", () => { + const chronicle = Chronicle.open(replica); + const state = chronicle.load({ + id: "test:nonexistent", + type: "application/json", + }); + + expect(state.the).toBe("application/json"); + expect(state.of).toBe("test:nonexistent"); + expect(state.is).toBeUndefined(); + }); + }); + + describe("Edge Cases", () => { + it("should handle empty paths correctly", () => { + const chronicle = Chronicle.open(replica); + const address = { + id: "test:9", + type: "application/json", + path: [], + } as const; + + chronicle.write(address, [1, 2, 3]); + const result = chronicle.read(address); + expect(result.ok?.value).toEqual([1, 2, 3]); + }); + + it("should handle array index paths", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:10", + type: "application/json", + path: [], + } as const; + + chronicle.write(rootAddress, { items: ["a", "b", "c"] }); + chronicle.write({ ...rootAddress, path: ["items", "1"] }, "B"); + + const result = chronicle.read(rootAddress); + expect(result.ok?.value).toEqual({ items: ["a", "B", "c"] }); + }); + + it("should handle numeric 
string paths", () => { + const chronicle = Chronicle.open(replica); + const rootAddress = { + id: "test:11", + type: "application/json", + path: [], + } as const; + + chronicle.write(rootAddress, { "123": "numeric key" }); + const result = chronicle.read({ ...rootAddress, path: ["123"] }); + expect(result.ok?.value).toBe("numeric key"); + }); + }); +}); diff --git a/packages/runner/test/journal.test.ts b/packages/runner/test/journal.test.ts new file mode 100644 index 000000000..924ef140b --- /dev/null +++ b/packages/runner/test/journal.test.ts @@ -0,0 +1,720 @@ +import { afterEach, beforeEach, describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect"; +import { Identity } from "@commontools/identity"; +import { StorageManager } from "@commontools/runner/storage/cache.deno"; +import * as Journal from "../src/storage/transaction/journal.ts"; +import { assert } from "@commontools/memory/fact"; + +const signer = await Identity.fromPassphrase("journal test"); +const signer2 = await Identity.fromPassphrase("journal test 2"); +const space = signer.did(); +const space2 = signer2.did(); + +describe("Journal", () => { + let storage: ReturnType; + let journal: ReturnType; + + beforeEach(() => { + storage = StorageManager.emulate({ as: signer }); + journal = Journal.open(storage); + }); + + afterEach(async () => { + await storage?.close(); + }); + + describe("Basic Operations", () => { + it("should start in open state", () => { + expect(journal.status).toBe("open"); + }); + + it("should track activity", () => { + expect([...journal.activity()]).toEqual([]); + }); + + it("should provide novelty and history iterators", () => { + expect([...journal.novelty(space)]).toEqual([]); + expect([...journal.history(space)]).toEqual([]); + }); + }); + + describe("Reader Operations", () => { + it("should create readers for memory spaces", () => { + const { ok: reader, error } = journal.reader(space); + expect(error).toBeUndefined(); + expect(reader).toBeDefined(); + }); 
+ + it("should return same reader instance for same space", () => { + const { ok: reader1 } = journal.reader(space); + const { ok: reader2 } = journal.reader(space); + expect(reader1).toBe(reader2); + }); + + it("should read undefined for non-existent entity", () => { + const { ok: reader } = journal.reader(space); + const address = { + id: "test:nonexistent", + type: "application/json", + path: [], + } as const; + + const result = reader!.read(address); + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toBeUndefined(); + }); + + it("should read existing data from replica", async () => { + // Pre-populate replica + const testData = { name: "Charlie", age: 25 }; + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:1", + is: testData, + }), + ], + claims: [], + }); + + // Create new journal and read + const freshJournal = Journal.open(storage); + const { ok: reader } = freshJournal.reader(space); + const address = { + id: "user:1", + type: "application/json", + path: [], + } as const; + + const result = reader!.read(address); + expect(result.ok).toBeDefined(); + expect(result.ok?.value).toEqual(testData); + }); + + it("should read nested paths from replica data", async () => { + // Pre-populate replica + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:2", + is: { + profile: { + name: "David", + settings: { theme: "dark" }, + }, + }, + }), + ], + claims: [], + }); + + const freshJournal = Journal.open(storage); + const { ok: reader } = freshJournal.reader(space); + const nestedAddress = { + id: "user:2", + type: "application/json", + path: ["profile", "settings", "theme"], + } as const; + + const result = reader!.read(nestedAddress); + expect(result.ok?.value).toBe("dark"); + }); + }); + + describe("Writer Operations", () => { + it("should create writers for memory spaces", () => { + const { ok: 
writer, error } = journal.writer(space); + expect(error).toBeUndefined(); + expect(writer).toBeDefined(); + }); + + it("should return same writer instance for same space", () => { + const { ok: writer1 } = journal.writer(space); + const { ok: writer2 } = journal.writer(space); + expect(writer1).toBe(writer2); + }); + + it("should write and read a simple value", () => { + const { ok: writer } = journal.writer(space); + const address = { + id: "test:1", + type: "application/json", + path: [], + } as const; + const value = { name: "Alice", age: 30 }; + + // Write using writer instance + const writeResult = writer!.write(address, value); + expect(writeResult.ok).toBeDefined(); + expect(writeResult.ok?.value).toEqual(value); + + // Read using writer instance + const readResult = writer!.read(address); + expect(readResult.ok).toBeDefined(); + expect(readResult.ok?.value).toEqual(value); + }); + + it("should handle nested path writes and reads", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:2", + type: "application/json", + path: [], + } as const; + const nestedAddress = { + id: "test:2", + type: "application/json", + path: ["profile", "name"], + } as const; + + // Write root + writer!.write(rootAddress, { + profile: { name: "Bob", bio: "Developer" }, + posts: [], + }); + + // Write to nested path + writer!.write(nestedAddress, "Robert"); + + // Read nested path + const nestedResult = writer!.read(nestedAddress); + expect(nestedResult.ok?.value).toBe("Robert"); + + // Read root should have the updated nested value + const rootResult = writer!.read(rootAddress); + expect(rootResult.ok?.value).toEqual({ + profile: { name: "Robert", bio: "Developer" }, + posts: [], + }); + }); + + it("should track novelty changes", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:3", + type: "application/json", + path: [], + } as const; + const nestedAddress = { + id: "test:3", + type: 
"application/json", + path: ["name"], + } as const; + + // First create the parent object + writer!.write(rootAddress, { name: "Initial" }); + // Then write to nested path + writer!.write(nestedAddress, "Alice"); + + const noveltyEntries = [...journal.novelty(space)]; + expect(noveltyEntries).toHaveLength(1); + expect(noveltyEntries[0].address.path).toEqual([]); + expect(noveltyEntries[0].value).toEqual({ name: "Alice" }); + }); + }); + + describe("Multi-Space Operations", () => { + it("should handle readers and writers for multiple spaces", () => { + const { ok: reader1 } = journal.reader(space); + const { ok: reader2 } = journal.reader(space2); + const { ok: writer1 } = journal.writer(space); + const { ok: writer2 } = journal.writer(space2); + + expect(reader1).toBeDefined(); + expect(reader2).toBeDefined(); + expect(writer1).toBeDefined(); + expect(writer2).toBeDefined(); + expect(reader1).not.toBe(reader2); + expect(writer1).not.toBe(writer2); + }); + + it("should isolate operations between spaces", () => { + const { ok: writer1 } = journal.writer(space); + const { ok: writer2 } = journal.writer(space2); + const address = { + id: "test:isolation", + type: "application/json", + path: [], + } as const; + + // Write to space1 + writer1!.write(address, { space: "space1" }); + + // Write to space2 + writer2!.write(address, { space: "space2" }); + + // Read from space1 + const result1 = writer1!.read(address); + expect(result1.ok?.value).toEqual({ space: "space1" }); + + // Read from space2 + const result2 = writer2!.read(address); + expect(result2.ok?.value).toEqual({ space: "space2" }); + + // Check novelty is isolated + const novelty1 = [...journal.novelty(space)]; + const novelty2 = [...journal.novelty(space2)]; + expect(novelty1).toHaveLength(1); + expect(novelty2).toHaveLength(1); + expect(novelty1[0].value).toEqual({ space: "space1" }); + expect(novelty2[0].value).toEqual({ space: "space2" }); + }); + }); + + describe("Transaction Lifecycle", () => { + 
it("should close successfully with no changes", () => { + const { ok: archive, error } = journal.close(); + expect(error).toBeUndefined(); + expect(archive).toBeDefined(); + expect(archive!.size).toBe(0); + expect(journal.status).toBe("closed"); + }); + + it("should close successfully with changes", () => { + const { ok: writer } = journal.writer(space); + const address = { + id: "test:close", + type: "application/json", + path: [], + } as const; + + writer!.write(address, { test: "data" }); + + const { ok: archive, error } = journal.close(); + expect(error).toBeUndefined(); + expect(archive).toBeDefined(); + expect(archive!.size).toBe(1); + expect(archive!.has(space)).toBe(true); + expect(journal.status).toBe("closed"); + }); + + it("should abort successfully", () => { + const { ok: writer } = journal.writer(space); + writer!.write({ + id: "test:abort", + type: "application/json", + path: [], + }, { test: "data" }); + + const reason = "test abort"; + const result = journal.abort(reason); + expect(result.ok).toBeDefined(); + expect(journal.status).toBe("closed"); + }); + + it("should fail operations after closing", () => { + journal.close(); + + const readerResult = journal.reader(space); + expect(readerResult.error).toBeDefined(); + expect(readerResult.error?.name).toBe("StorageTransactionCompleteError"); + + const writerResult = journal.writer(space); + expect(writerResult.error).toBeDefined(); + expect(writerResult.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should fail operations after aborting", () => { + journal.abort("test reason"); + + const readerResult = journal.reader(space); + expect(readerResult.error).toBeDefined(); + expect(readerResult.error?.name).toBe("StorageTransactionAborted"); + + const writerResult = journal.writer(space); + expect(writerResult.error).toBeDefined(); + expect(writerResult.error?.name).toBe("StorageTransactionAborted"); + }); + + it("should handle multiple close attempts", () => { + const result1 = 
journal.close(); + expect(result1.ok).toBeDefined(); + + const result2 = journal.close(); + expect(result2.error).toBeDefined(); + expect(result2.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should handle multiple abort attempts", () => { + const result1 = journal.abort("reason1"); + expect(result1.ok).toBeDefined(); + + const result2 = journal.abort("reason2"); + expect(result2.error).toBeDefined(); + expect(result2.error?.name).toBe("StorageTransactionAborted"); + }); + + it("should fail reader operations after journal is closed", () => { + const { ok: reader } = journal.reader(space); + expect(reader).toBeDefined(); + + journal.close(); + + const newReaderResult = journal.reader(space); + expect(newReaderResult.error).toBeDefined(); + expect(newReaderResult.error?.name).toBe("StorageTransactionCompleteError"); + + const readResult = reader!.read({ + id: "test:closed", + type: "application/json", + path: [], + }); + expect(readResult.error).toBeDefined(); + expect(readResult.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should fail reader operations after journal is aborted", () => { + const { ok: reader } = journal.reader(space); + expect(reader).toBeDefined(); + + journal.abort("test abort"); + + const newReaderResult = journal.reader(space); + expect(newReaderResult.error).toBeDefined(); + expect(newReaderResult.error?.name).toBe("StorageTransactionAborted"); + + const readResult = reader!.read({ + id: "test:aborted", + type: "application/json", + path: [], + }); + expect(readResult.error).toBeDefined(); + expect(readResult.error?.name).toBe("StorageTransactionAborted"); + }); + + it("should fail writer operations after journal is closed", () => { + const { ok: writer } = journal.writer(space); + expect(writer).toBeDefined(); + + journal.close(); + + const newWriterResult = journal.writer(space); + expect(newWriterResult.error).toBeDefined(); + 
expect(newWriterResult.error?.name).toBe("StorageTransactionCompleteError"); + + const readResult = writer!.read({ + id: "test:closed-write", + type: "application/json", + path: [], + }); + expect(readResult.error).toBeDefined(); + expect(readResult.error?.name).toBe("StorageTransactionCompleteError"); + + const writeResult = writer!.write({ + id: "test:closed-write", + type: "application/json", + path: [], + }, { test: "data" }); + expect(writeResult.error).toBeDefined(); + expect(writeResult.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should fail writer operations after journal is aborted", () => { + const { ok: writer } = journal.writer(space); + expect(writer).toBeDefined(); + + journal.abort("test abort"); + + const newWriterResult = journal.writer(space); + expect(newWriterResult.error).toBeDefined(); + expect(newWriterResult.error?.name).toBe("StorageTransactionAborted"); + + const readResult = writer!.read({ + id: "test:aborted-write", + type: "application/json", + path: [], + }); + expect(readResult.error).toBeDefined(); + expect(readResult.error?.name).toBe("StorageTransactionAborted"); + + const writeResult = writer!.write({ + id: "test:aborted-write", + type: "application/json", + path: [], + }, { test: "data" }); + expect(writeResult.error).toBeDefined(); + expect(writeResult.error?.name).toBe("StorageTransactionAborted"); + }); + }); + + describe("Read-After-Write Consistency", () => { + it("should maintain consistency for overlapping writes", () => { + const { ok: writer } = journal.writer(space); + const address = { + id: "test:consistency", + type: "application/json", + path: [], + } as const; + + // First write + writer!.write(address, { a: 1, b: 2 }); + + // Overlapping write + writer!.write(address, { a: 10, c: 3 }); + + // Should get the latest write + const result = writer!.read(address); + expect(result.ok?.value).toEqual({ a: 10, c: 3 }); + }); + + it("should handle mixed reads from replica and writes", async () => { 
+ // Pre-populate replica + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:consistency", + is: { name: "Grace", age: 35 }, + }), + ], + claims: [], + }); + + const freshJournal = Journal.open(storage); + const { ok: writer } = freshJournal.writer(space); + const rootAddress = { + id: "user:consistency", + type: "application/json", + path: [], + } as const; + const ageAddress = { + ...rootAddress, + path: ["age"], + } as const; + + // First read from replica + const initialRead = writer!.read(rootAddress); + expect(initialRead.ok?.value).toEqual({ name: "Grace", age: 35 }); + + // Write to nested path + writer!.write(ageAddress, 36); + + // Read root again - should have updated age + const finalRead = writer!.read(rootAddress); + expect(finalRead.ok?.value).toEqual({ name: "Grace", age: 36 }); + }); + }); + + describe("Error Handling", () => { + it("should handle reading invalid nested paths", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:error", + type: "application/json", + path: [], + } as const; + + // Write a non-object value + writer!.write(rootAddress, "not an object"); + + // Try to read nested path + const result = writer!.read({ + ...rootAddress, + path: ["property"], + }); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle writing to invalid nested paths", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:write-error", + type: "application/json", + path: [], + } as const; + + // Write a string + writer!.write(rootAddress, "hello"); + + // Try to write to nested path + const result = writer!.write( + { ...rootAddress, path: ["property"] }, + "value", + ); + + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle deleting 
properties with undefined", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:delete", + type: "application/json", + path: [], + } as const; + + // Write object + writer!.write(rootAddress, { name: "Henry", age: 40 }); + + // Delete property + writer!.write({ ...rootAddress, path: ["age"] }, undefined); + + // Read should not have the deleted property + const result = writer!.read(rootAddress); + expect(result.ok?.value).toEqual({ name: "Henry" }); + }); + }); + + describe("History and Novelty Tracking", () => { + it("should track detailed activity for reads and writes", () => { + const { ok: writer } = journal.writer(space); + const { ok: reader } = journal.reader(space); + + const address = { + id: "user:activity", + type: "application/json", + path: [], + } as const; + + // Initial activity should be empty + const initialActivity = [...journal.activity()]; + expect(initialActivity).toHaveLength(0); + + // Write operation + writer!.write(address, { name: "David" }); + + // Read operation + reader!.read(address); + + // Check activity log + const activity = [...journal.activity()]; + expect(activity).toHaveLength(2); + + expect(activity[0]).toHaveProperty("write"); + expect(activity[0].write).toEqual({ ...address, space }); + + expect(activity[1]).toHaveProperty("read"); + expect(activity[1].read).toEqual({ ...address, space }); + }); + + it("should track read invariants in history", async () => { + // Pre-populate replica + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:history", + is: { status: "active" }, + }), + ], + claims: [], + }); + + const freshJournal = Journal.open(storage); + const { ok: reader } = freshJournal.reader(space); + const address = { + id: "user:history", + type: "application/json", + path: [], + } as const; + + // First read should capture invariant + const result1 = reader!.read(address); + 
expect(result1.ok?.value).toEqual({ status: "active" }); + + const historyEntries = [...freshJournal.history(space)]; + expect(historyEntries).toHaveLength(1); + expect(historyEntries[0].address).toEqual(address); + expect(historyEntries[0].value).toEqual({ status: "active" }); + + // Second read should use history + const result2 = reader!.read(address); + expect(result2.ok?.value).toEqual({ status: "active" }); + expect([...freshJournal.history(space)]).toHaveLength(1); + }); + + it("should capture original replica read in history, not merged result", async () => { + // Pre-populate replica + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:validation", + is: { name: "Original", count: 10 }, + }), + ], + claims: [], + }); + + const freshJournal = Journal.open(storage); + const { ok: writer } = freshJournal.writer(space); + const rootAddress = { + id: "user:validation", + type: "application/json", + path: [], + } as const; + + // Write some changes (creates novelty) + writer!.write({ ...rootAddress, path: ["name"] }, "Modified"); + writer!.write({ ...rootAddress, path: ["count"] }, 20); + + // Read from replica (should return merged result but capture original in history) + const readResult = writer!.read(rootAddress); + expect(readResult.ok?.value).toEqual({ + name: "Modified", + count: 20, + }); + + // History should capture the ORIGINAL replica read, not the merged result + const historyEntries = [...freshJournal.history(space)]; + expect(historyEntries).toHaveLength(1); + expect(historyEntries[0].value).toEqual({ + name: "Original", // Should be original value from replica + count: 10, // Should be original value from replica + }); + }); + }); + + describe("Edge Cases", () => { + it("should handle empty paths correctly", () => { + const { ok: writer } = journal.writer(space); + const address = { + id: "test:empty-path", + type: "application/json", + path: [], + } as const; + + 
writer!.write(address, [1, 2, 3]); + const result = writer!.read(address); + expect(result.ok?.value).toEqual([1, 2, 3]); + }); + + it("should handle array index paths", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:array", + type: "application/json", + path: [], + } as const; + + writer!.write(rootAddress, { items: ["a", "b", "c"] }); + writer!.write({ ...rootAddress, path: ["items", "1"] }, "B"); + + const result = writer!.read(rootAddress); + expect(result.ok?.value).toEqual({ items: ["a", "B", "c"] }); + }); + + it("should handle numeric string paths", () => { + const { ok: writer } = journal.writer(space); + const rootAddress = { + id: "test:numeric", + type: "application/json", + path: [], + } as const; + + writer!.write(rootAddress, { "123": "numeric key" }); + const result = writer!.read({ ...rootAddress, path: ["123"] }); + expect(result.ok?.value).toBe("numeric key"); + }); + }); +}); diff --git a/packages/runner/test/storage-transaction-shim.test.ts b/packages/runner/test/storage-transaction-shim.test.ts index e1f8e6ca5..6d8623129 100644 --- a/packages/runner/test/storage-transaction-shim.test.ts +++ b/packages/runner/test/storage-transaction-shim.test.ts @@ -5,7 +5,7 @@ import { StorageManager } from "@commontools/runner/storage/cache.deno"; import { Identity } from "@commontools/identity"; import { INotFoundError } from "../src/storage/interface.ts"; import { getJSONFromDataURI } from "../src/uri-utils.ts"; -import { IMemoryAddress } from "../src/storage/interface.ts"; +import { IMemorySpaceAddress } from "../src/storage/interface.ts"; const signer = await Identity.fromPassphrase("test operator"); const space = signer.did(); @@ -586,7 +586,7 @@ describe("data: URI behaviors", () => { id: `data:application/json,${encoded}`, type: "application/json", path: ["foo", "bar"], - } as IMemoryAddress; + } as IMemorySpaceAddress; const result = transaction.read(address); expect(result.ok).toBeDefined(); 
expect(result.ok?.value).toBe(42); @@ -599,7 +599,7 @@ describe("data: URI behaviors", () => { id: "data:application/json", // missing data type: "application/json", path: [], - } as IMemoryAddress; + } as IMemorySpaceAddress; const result = transaction.read(address); expect(result.error).toBeDefined(); expect(result.error?.name).toBe("InvalidDataURIError"); @@ -616,7 +616,7 @@ describe("data: URI behaviors", () => { id: `data:application/json,${invalidJson}`, type: "application/json", path: [], - } as IMemoryAddress; + } as IMemorySpaceAddress; const result = transaction.read(address); expect(result.error).toBeDefined(); expect(result.error?.name).toBe("InvalidDataURIError"); @@ -632,7 +632,7 @@ describe("data: URI behaviors", () => { id: "data:application/json,%7B%7D", type: "application/json", path: [], - } as IMemoryAddress; + } as IMemorySpaceAddress; const result = transaction.write(address, {}); expect(result.error).toBeDefined(); expect(result.error?.name).toBe("UnsupportedMediaTypeError"); diff --git a/packages/runner/test/transaction.test.ts b/packages/runner/test/transaction.test.ts new file mode 100644 index 000000000..85532669b --- /dev/null +++ b/packages/runner/test/transaction.test.ts @@ -0,0 +1,555 @@ +import { afterEach, beforeEach, describe, it } from "@std/testing/bdd"; +import { expect } from "@std/expect"; +import { Identity } from "@commontools/identity"; +import { StorageManager } from "@commontools/runner/storage/cache.deno"; +import * as Transaction from "../src/storage/transaction.ts"; +import { assert } from "@commontools/memory/fact"; + +const signer = await Identity.fromPassphrase("transaction test"); +const signer2 = await Identity.fromPassphrase("transaction test 2"); +const space = signer.did(); +const space2 = signer2.did(); + +describe("StorageTransaction", () => { + let storage: ReturnType; + let transaction: ReturnType; + + beforeEach(() => { + storage = StorageManager.emulate({ as: signer }); + transaction = 
Transaction.create(storage); + }); + + afterEach(async () => { + await storage?.close(); + }); + + describe("Basic Lifecycle", () => { + it("should start with ready status", () => { + const result = transaction.status(); + expect(result.ok).toBeDefined(); + expect(result.ok?.status).toBe("ready"); + }); + + it("should create reader for a space", () => { + const result = transaction.reader(space); + expect(result.ok).toBeDefined(); + expect(result.ok?.did()).toBe(space); + }); + + it("should create writer for a space", () => { + const result = transaction.writer(space); + expect(result.ok).toBeDefined(); + expect(result.ok?.did()).toBe(space); + }); + + it("should return same reader instance for same space", () => { + const reader1 = transaction.reader(space); + const reader2 = transaction.reader(space); + expect(reader1.ok).toBe(reader2.ok); + }); + + it("should return same writer instance for same space", () => { + const writer1 = transaction.writer(space); + const writer2 = transaction.writer(space); + expect(writer1.ok).toBe(writer2.ok); + }); + + it("should create different readers for different spaces", () => { + const reader1 = transaction.reader(space); + const reader2 = transaction.reader(space2); + + expect(reader1.ok).toBeDefined(); + expect(reader2.ok).toBeDefined(); + expect(reader1.ok).not.toBe(reader2.ok); + expect(reader1.ok?.did()).toBe(space); + expect(reader2.ok?.did()).toBe(space2); + }); + }); + + describe("Write Isolation", () => { + it("should enforce single writer constraint", () => { + // First writer succeeds + const writer1 = transaction.writer(space); + expect(writer1.ok).toBeDefined(); + + // Second writer for different space fails + const writer2 = transaction.writer(space2); + expect(writer2.error).toBeDefined(); + expect(writer2.error?.name).toBe("StorageTransactionWriteIsolationError"); + if (writer2.error?.name === "StorageTransactionWriteIsolationError") { + expect(writer2.error.open).toBe(space); + 
expect(writer2.error.requested).toBe(space2); + } + }); + + it("should allow multiple readers with single writer", () => { + const writer = transaction.writer(space); + expect(writer.ok).toBeDefined(); + + const reader1 = transaction.reader(space); + const reader2 = transaction.reader(space2); + + expect(reader1.ok).toBeDefined(); + expect(reader2.ok).toBeDefined(); + }); + + it("should allow writer after readers", () => { + const reader1 = transaction.reader(space); + const reader2 = transaction.reader(space2); + + expect(reader1.ok).toBeDefined(); + expect(reader2.ok).toBeDefined(); + + const writer = transaction.writer(space); + expect(writer.ok).toBeDefined(); + }); + }); + + describe("Read/Write Operations", () => { + it("should read and write through transaction interface", () => { + const address = { + space, + id: "test:1", + type: "application/json", + path: [], + } as const; + const value = { name: "Alice", age: 30 }; + + // Write value + const writeResult = transaction.write(address, value); + expect(writeResult.ok).toBeDefined(); + expect(writeResult.ok?.value).toEqual(value); + + // Read value + const readResult = transaction.read(address); + expect(readResult.ok).toBeDefined(); + if (readResult.ok) { + expect(readResult.ok.value).toEqual(value); + } + }); + + it("should handle cross-space operations", () => { + const address1 = { + space, + id: "test:1", + type: "application/json", + path: [], + } as const; + const address2 = { + space: space2, + id: "test:1", + type: "application/json", + path: [], + } as const; + + // Write to first space + const write1 = transaction.write(address1, { space: 1 }); + expect(write1.ok).toBeDefined(); + + // Try to write to second space (should fail due to write isolation) + const write2 = transaction.write(address2, { space: 2 }); + expect(write2.error).toBeDefined(); + expect(write2.error?.name).toBe("StorageTransactionWriteIsolationError"); + + // But reading from second space should work + const read2 = 
transaction.read(address2); + expect(read2.ok).toBeDefined(); + if (read2.ok) { + expect(read2.ok.value).toBeUndefined(); // No data written + } + }); + }); + + describe("Transaction Abort", () => { + it("should abort successfully", () => { + const writer = transaction.writer(space); + writer.ok!.write({ + id: "test:abort", + type: "application/json", + path: [], + }, { test: "data" }); + + const reason = "test abort"; + const result = transaction.abort(reason); + expect(result.ok).toBeDefined(); + + const status = transaction.status(); + expect(status.error).toBeDefined(); + expect(status.error?.name).toBe("StorageTransactionAborted"); + if (status.error?.name === "StorageTransactionAborted") { + expect(status.error.reason).toBe(reason); + } + }); + + it("should fail operations after abort", () => { + transaction.abort("test"); + + const readerResult = transaction.reader(space); + expect(readerResult.error).toBeDefined(); + expect(readerResult.error?.name).toBe("StorageTransactionCompleteError"); + + const writerResult = transaction.writer(space); + expect(writerResult.error).toBeDefined(); + expect(writerResult.error?.name).toBe("StorageTransactionCompleteError"); + + const readResult = transaction.read({ + space, + id: "test:1", + type: "application/json", + path: [], + }); + expect(readResult.error).toBeDefined(); + + const writeResult = transaction.write({ + space, + id: "test:1", + type: "application/json", + path: [], + }, {}); + expect(writeResult.error).toBeDefined(); + }); + + it("should not abort twice", () => { + const result1 = transaction.abort("first"); + expect(result1.ok).toBeDefined(); + + const result2 = transaction.abort("second"); + expect(result2.error).toBeDefined(); + expect(result2.error?.name).toBe("StorageTransactionCompleteError"); + }); + }); + + describe("Transaction Commit", () => { + it("should commit empty transaction", async () => { + const result = await transaction.commit(); + expect(result.ok).toBeDefined(); + + const status = 
transaction.status(); + expect(status.ok).toBeDefined(); + expect(status.ok?.status).toBe("done"); + }); + + it("should commit transaction with changes", async () => { + const writer = transaction.writer(space); + const address = { + id: "test:commit", + type: "application/json", + path: [], + } as const; + + writer.ok!.write(address, { committed: true }); + + const result = await transaction.commit(); + expect(result.ok).toBeDefined(); + + // Verify by creating new transaction and reading + const verifyTransaction = Transaction.create(storage); + const verifyResult = verifyTransaction.read({ + space, + id: "test:commit", + type: "application/json", + path: [], + }); + if (verifyResult.ok) { + expect(verifyResult.ok.value).toEqual({ committed: true }); + } else { + expect(verifyResult.ok).toBeDefined(); + } + }); + + it("should transition through pending state", async () => { + const writer = transaction.writer(space); + writer.ok!.write({ + id: "test:pending", + type: "application/json", + path: [], + }, { test: "data" }); + + const commitPromise = transaction.commit(); + + // Check status while committing + const pendingStatus = transaction.status(); + expect(pendingStatus.ok).toBeDefined(); + expect(pendingStatus.ok?.status).toBe("pending"); + + await commitPromise; + + // Check status after commit + const doneStatus = transaction.status(); + expect(doneStatus.ok).toBeDefined(); + expect(doneStatus.ok?.status).toBe("done"); + }); + + it("should fail operations after commit", async () => { + await transaction.commit(); + + const readerResult = transaction.reader(space); + expect(readerResult.error).toBeDefined(); + expect(readerResult.error?.name).toBe("StorageTransactionCompleteError"); + + const writerResult = transaction.writer(space); + expect(writerResult.error).toBeDefined(); + expect(writerResult.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should not commit twice", async () => { + const result1 = await transaction.commit(); + 
expect(result1.ok).toBeDefined(); + + const result2 = await transaction.commit(); + expect(result2.error).toBeDefined(); + expect(result2.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should not commit after abort", async () => { + transaction.abort("test"); + + const result = await transaction.commit(); + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionCompleteError"); + }); + + it("should fail commit when replica is modified after read invariant is established", async () => { + // Pre-populate replica with initial data + const replica = storage.open(space).replica; + const v1 = assert({ + the: "application/json", + of: "user:consistency", + is: { name: "Initial", version: 1 }, + }); + + const initialCommit = await replica.commit({ + facts: [v1], + claims: [], + }); + expect(initialCommit.ok).toBeDefined(); + + // Create transaction and establish a read invariant + const freshTransaction = Transaction.create(storage); + const address = { + space, + id: "user:consistency", + type: "application/json", + path: [], + } as const; + + // Read to establish invariant (this locks in the expected value) + const readResult = freshTransaction.read(address); + if (readResult.ok) { + expect(readResult.ok.value).toEqual({ name: "Initial", version: 1 }); + } else { + expect(readResult.ok).toBeDefined(); + } + + // Modify the replica outside the transaction with proper causal reference + const v2 = assert({ + the: "application/json", + of: "user:consistency", + is: { name: "Modified", version: 2 }, + cause: v1, + }); + + const modifyCommit = await replica.commit({ + facts: [v2], + claims: [], + }); + expect(modifyCommit.ok).toBeDefined(); + + // Verify the replica state actually changed + const updatedState = replica.get({ the: "application/json", of: "user:consistency" }); + expect(updatedState?.is).toEqual({ name: "Modified", version: 2 }); + + // Now attempt to commit - should fail due to read invariant violation + 
const commitResult = await freshTransaction.commit(); + expect(commitResult.error).toBeDefined(); + expect(commitResult.error?.name).toBe("StorageTransactionInconsistent"); + + // Verify transaction status shows failure + const status = freshTransaction.status(); + expect(status.error).toBeDefined(); + expect(status.error?.name).toBe("StorageTransactionInconsistent"); + }); + }); + + describe("Pre-populated Replica Reads", () => { + it("should read existing data from replica", async () => { + // Pre-populate replica + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "user:existing", + is: { name: "Bob", status: "active" }, + }), + ], + claims: [], + }); + + // Create new transaction and read + const freshTransaction = Transaction.create(storage); + const address = { + space, + id: "user:existing", + type: "application/json", + path: [], + } as const; + + const result = freshTransaction.read(address); + expect(result.ok).toBeDefined(); + if (result.ok) { + expect(result.ok.value).toEqual({ name: "Bob", status: "active" }); + } + }); + + it("should handle nested path reads from replica", async () => { + // Pre-populate replica + const replica = storage.open(space).replica; + await replica.commit({ + facts: [ + assert({ + the: "application/json", + of: "config:nested", + is: { + database: { + host: "localhost", + port: 5432, + credentials: { user: "admin" }, + }, + }, + }), + ], + claims: [], + }); + + const freshTransaction = Transaction.create(storage); + const nestedAddress = { + space, + id: "config:nested", + type: "application/json", + path: ["database", "credentials", "user"], + } as const; + + const result = freshTransaction.read(nestedAddress); + if (result.ok) { + expect(result.ok.value).toBe("admin"); + } else { + expect(result.ok).toBeDefined(); + } + }); + }); + + describe("Error Handling", () => { + it("should handle reading invalid nested paths", () => { + const writer = 
transaction.writer(space); + const rootAddress = { + space, + id: "test:error", + type: "application/json", + path: [], + } as const; + + // Write a non-object value + writer.ok!.write(rootAddress, "not an object"); + + // Try to read nested path + const nestedAddress = { + ...rootAddress, + path: ["property"], + } as const; + + const result = transaction.read(nestedAddress); + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + + it("should handle writing to invalid nested paths", () => { + const address = { + space, + id: "test:write-error", + type: "application/json", + path: [], + } as const; + + // Write a string + transaction.write(address, "hello"); + + // Try to write to nested path + const nestedAddress = { + ...address, + path: ["property"], + } as const; + + const result = transaction.write(nestedAddress, "value"); + expect(result.error).toBeDefined(); + expect(result.error?.name).toBe("StorageTransactionInconsistent"); + }); + }); + + describe("Edge Cases", () => { + it("should handle operations on transaction with no writer", async () => { + // Only create readers, no writers + const reader1 = transaction.reader(space); + const reader2 = transaction.reader(space2); + + expect(reader1.ok).toBeDefined(); + expect(reader2.ok).toBeDefined(); + + // Commit should still work + const result = await transaction.commit(); + expect(result.ok).toBeDefined(); + }); + + it("should handle undefined values for deletion", () => { + const rootAddress = { + space, + id: "test:delete", + type: "application/json", + path: [], + } as const; + + // Write object + transaction.write(rootAddress, { name: "Eve", age: 28 }); + + // Delete property + const propAddress = { + ...rootAddress, + path: ["age"], + } as const; + transaction.write(propAddress, undefined); + + // Read should not have the deleted property + const result = transaction.read(rootAddress); + if (result.ok) { + expect(result.ok.value).toEqual({ name: 
"Eve" }); + } else { + expect(result.ok).toBeDefined(); + } + }); + + it("should handle array operations", () => { + const address = { + space, + id: "test:array", + type: "application/json", + path: [], + } as const; + + transaction.write(address, { items: ["a", "b", "c"] }); + + const itemAddress = { + ...address, + path: ["items", "1"], + } as const; + + transaction.write(itemAddress, "B"); + + const result = transaction.read(address); + if (result.ok) { + expect(result.ok.value).toEqual({ items: ["a", "B", "c"] }); + } else { + expect(result.ok).toBeDefined(); + } + }); + }); +}); \ No newline at end of file