diff --git a/.eslintrc b/.eslintrc index 91bd9533d..2db6084a1 100644 --- a/.eslintrc +++ b/.eslintrc @@ -91,7 +91,8 @@ "sonarjs/no-duplicate-string": "off", // Handled by prettier "@typescript-eslint/indent": ["off"], - "@typescript-eslint/no-floating-promises": "error" + "@typescript-eslint/no-floating-promises": "error", + "no-else-return": "off" }, "overrides": [ diff --git a/docs/sequencer/spec.md b/docs/sequencer/spec.md new file mode 100644 index 000000000..c74e9d2a8 --- /dev/null +++ b/docs/sequencer/spec.md @@ -0,0 +1,76 @@ +## Merkle Tree Stores + +Object we need to store: +(Nodes, Leaves, MaximumIndex) + +Level 1: +Async stores: (InMemory*, Redis*) + +Schema: +Record + +write +getAsync +getMaximumIndexAsync +getLeafLessOrEqualAsync(path) (gives us either our current leaf or previous leaf in case of insert) + +openTransaction() +commit() + +( getLeafByIndex ) + +Level 2: +CachedStore: implements Sync, parent: Async + +Sync: +set +getNode +getLeaf(path) => { leaf: LinkedLeaf, index: bigint } +getMaximumIndex +getLeafLessOrEqual(path) => { leaf: LinkedLeaf, index: bigint } + +Cached: +preloadMerkleWitness(index) +preloadKeys(paths: string[]) +mergeIntoParent() + +Level 3: +SyncCachedStore: implements Sync, parent: Sync +mergeIntoParent() + +preLoading: +input: path +``` +const leaf = parent.getLeaf(path) +if(leaf !== undefined) { + super.cache(leaf); + // Update + preloadMerkleWitness(leaf.index); +} else { + // Insert + const previousLeaf = parent.getLeafLessOrEqual(path); + super.cache(previousLeaf); + preloadMerkleWitness(previousLeaf.index); + const maximumIndex = this.preloadAndGetMaximumINndex(); // super.getMaximumINdex() ?? 
await parent.getMaximumIndexASync() + preloadMerkleWitness(maximumIndex); +} + +``` + +Sync interface: +Union of LinkedMerkleTreeStore (rename to LinkedLeafStore) + MerkleTreeStore + +Async +Level 1 methods + +InMemoryLeafStore - subset that does leafs + maximumindex +InMemoryMerkleStore - subset that does only merkle nodes + +-> future Redis + +InMemoryAsyncLinkedMerkleTreeStore - implements Async +uses inmemory implementations + + + + diff --git a/packages/api/src/graphql/VanillaGraphqlModules.ts b/packages/api/src/graphql/VanillaGraphqlModules.ts index e7859ec9d..f94d29128 100644 --- a/packages/api/src/graphql/VanillaGraphqlModules.ts +++ b/packages/api/src/graphql/VanillaGraphqlModules.ts @@ -6,7 +6,7 @@ import { QueryGraphqlModule } from "./modules/QueryGraphqlModule"; import { BatchStorageResolver } from "./modules/BatchStorageResolver"; import { NodeStatusResolver } from "./modules/NodeStatusResolver"; import { BlockResolver } from "./modules/BlockResolver"; -import { MerkleWitnessResolver } from "./modules/MerkleWitnessResolver"; +import { LinkedMerkleWitnessResolver as MerkleWitnessResolver } from "./modules/LinkedMerkleWitnessResolver"; export type VanillaGraphqlModulesRecord = { MempoolResolver: typeof MempoolResolver; diff --git a/packages/api/src/graphql/modules/LeafResolver.ts b/packages/api/src/graphql/modules/LeafResolver.ts new file mode 100644 index 000000000..568060233 --- /dev/null +++ b/packages/api/src/graphql/modules/LeafResolver.ts @@ -0,0 +1,28 @@ +import { Field, ObjectType } from "type-graphql"; +import { LinkedLeafStruct } from "@proto-kit/common"; + +@ObjectType() +export class LeafDTO { + public static fromServiceLayerModel(leaf: LinkedLeafStruct) { + return new LeafDTO( + leaf.value.toString(), + leaf.path.toString(), + leaf.nextPath.toString() + ); + } + + @Field() + value: string; + + @Field() + path: string; + + @Field() + nextPath: string; + + private constructor(value: string, path: string, nextPath: string) { + this.value = 
value; + this.path = path; + this.nextPath = nextPath; + } +} diff --git a/packages/api/src/graphql/modules/LinkedMerkleWitnessResolver.ts b/packages/api/src/graphql/modules/LinkedMerkleWitnessResolver.ts new file mode 100644 index 000000000..2a016dfc0 --- /dev/null +++ b/packages/api/src/graphql/modules/LinkedMerkleWitnessResolver.ts @@ -0,0 +1,64 @@ +import { Arg, Field, ObjectType, Query } from "type-graphql"; +import { inject } from "tsyringe"; +import { + LinkedMerkleTree, + LinkedMerkleTreeReadWitness, +} from "@proto-kit/common"; +import { + AsyncLinkedLeafStore, + CachedLinkedLeafStore, +} from "@proto-kit/sequencer"; + +import { GraphqlModule, graphqlModule } from "../GraphqlModule"; + +import { MerkleWitnessDTO } from "./MerkleWitnessResolver"; +import { LeafDTO } from "./LeafResolver"; + +@ObjectType() +export class LinkedMerkleWitnessDTO { + public static fromServiceLayerObject(witness: LinkedMerkleTreeReadWitness) { + const { leaf, merkleWitness } = witness; + const leafDTO = LeafDTO.fromServiceLayerModel(leaf); + const witnessDTO = MerkleWitnessDTO.fromServiceLayerObject(merkleWitness); + return new LinkedMerkleWitnessDTO(leafDTO, witnessDTO); + } + + public constructor(leaf: LeafDTO, witness: MerkleWitnessDTO) { + this.leaf = leaf; + this.merkleWitness = new MerkleWitnessDTO( + witness.siblings, + witness.isLefts + ); + } + + @Field(() => LeafDTO) + public leaf: LeafDTO; + + @Field(() => MerkleWitnessDTO) + public merkleWitness: MerkleWitnessDTO; +} + +@graphqlModule() +export class LinkedMerkleWitnessResolver extends GraphqlModule { + public constructor( + @inject("AsyncMerkleStore") + private readonly treeStore: AsyncLinkedLeafStore + ) { + super(); + } + + @Query(() => LinkedMerkleWitnessDTO, { + description: + "Allows retrieval of merkle witnesses corresponding to a specific path in the appchain's state tree. 
These proves are generally retrieved from the current 'proven' state", + }) + public async witness(@Arg("path") path: string) { + const syncStore = await CachedLinkedLeafStore.new(this.treeStore); + + const tree = new LinkedMerkleTree(syncStore.treeStore, syncStore); + await syncStore.preloadKey(BigInt(path)); + + const witness = tree.getReadWitness(BigInt(path)); + + return LinkedMerkleWitnessDTO.fromServiceLayerObject(witness); + } +} diff --git a/packages/api/src/graphql/modules/MerkleWitnessResolver.ts b/packages/api/src/graphql/modules/MerkleWitnessResolver.ts index 8c59b8cc8..a39324ab0 100644 --- a/packages/api/src/graphql/modules/MerkleWitnessResolver.ts +++ b/packages/api/src/graphql/modules/MerkleWitnessResolver.ts @@ -1,10 +1,15 @@ import { Arg, Field, ObjectType, Query } from "type-graphql"; import { Length } from "class-validator"; import { inject } from "tsyringe"; -import { RollupMerkleTree, RollupMerkleTreeWitness } from "@proto-kit/common"; import { - AsyncMerkleTreeStore, - CachedMerkleTreeStore, + LinkedLeafStruct, + LinkedMerkleTree, + LinkedMerkleTreeReadWitness, + RollupMerkleTreeWitness, +} from "@proto-kit/common"; +import { + AsyncLinkedLeafStore, + CachedLinkedLeafStore, } from "@proto-kit/sequencer"; import { GraphqlModule, graphqlModule } from "../GraphqlModule"; @@ -31,26 +36,79 @@ export class MerkleWitnessDTO { public isLefts: boolean[]; } +@ObjectType() +export class LinkedLeafDTO { + public static fromServiceLayerObject({ + path, + value, + nextPath, + }: LinkedLeafStruct) { + return new LinkedLeafDTO( + path.toString(), + value.toString(), + nextPath.toString() + ); + } + + constructor(path: string, value: string, nextPath: string) { + this.path = path; + this.value = value; + this.nextPath = nextPath; + } + + @Field(() => String) + public path: string; + + @Field(() => String) + public value: string; + + @Field(() => String) + public nextPath: string; +} + +@ObjectType() +export class LinkedTreeWitnessDTO { + public static 
fromServiceLayerObject(witness: LinkedMerkleTreeReadWitness) { + const merkleWitness = MerkleWitnessDTO.fromServiceLayerObject( + witness.merkleWitness + ); + const linkedLeaf = LinkedLeafDTO.fromServiceLayerObject(witness.leaf); + return new LinkedTreeWitnessDTO(merkleWitness, linkedLeaf); + } + + public constructor(merkleWitness: MerkleWitnessDTO, leaf: LinkedLeafDTO) { + this.merkleWitness = merkleWitness; + this.leaf = leaf; + } + + @Field(() => MerkleWitnessDTO) + public merkleWitness: MerkleWitnessDTO; + + @Field(() => LinkedLeafDTO) + public leaf: LinkedLeafDTO; +} + @graphqlModule() export class MerkleWitnessResolver extends GraphqlModule { public constructor( - @inject("AsyncMerkleStore") private readonly treeStore: AsyncMerkleTreeStore + @inject("AsyncLinkedLeafStore") + private readonly treeStore: AsyncLinkedLeafStore ) { super(); } - @Query(() => MerkleWitnessDTO, { + @Query(() => LinkedTreeWitnessDTO, { description: "Allows retrieval of merkle witnesses corresponding to a specific path in the appchain's state tree. 
These proves are generally retrieved from the current 'proven' state", }) public async witness(@Arg("path") path: string) { - const syncStore = new CachedMerkleTreeStore(this.treeStore); + const syncStore = await CachedLinkedLeafStore.new(this.treeStore); await syncStore.preloadKey(BigInt(path)); - const tree = new RollupMerkleTree(syncStore); + const tree = new LinkedMerkleTree(syncStore.treeStore, syncStore); - const witness = tree.getWitness(BigInt(path)); + const witness = tree.getReadWitness(BigInt(path)); - return MerkleWitnessDTO.fromServiceLayerObject(witness); + return LinkedTreeWitnessDTO.fromServiceLayerObject(witness); } } diff --git a/packages/api/src/index.ts b/packages/api/src/index.ts index 5a647dc29..c66e68a47 100644 --- a/packages/api/src/index.ts +++ b/packages/api/src/index.ts @@ -9,5 +9,6 @@ export * from "./graphql/modules/NodeStatusResolver"; export * from "./graphql/modules/AdvancedNodeStatusResolver"; export * from "./graphql/services/NodeStatusService"; export * from "./graphql/modules/MerkleWitnessResolver"; +export * from "./graphql/modules/LinkedMerkleWitnessResolver"; export * from "./graphql/VanillaGraphqlModules"; export * from "./metrics/OpenTelemetryServer"; diff --git a/packages/common/package.json b/packages/common/package.json index a5c8e1d79..e4897bde0 100644 --- a/packages/common/package.json +++ b/packages/common/package.json @@ -20,7 +20,8 @@ "lodash": "^4.17.21", "loglevel": "^1.8.1", "reflect-metadata": "^0.1.13", - "typescript-memoize": "^1.1.1" + "typescript-memoize": "^1.1.1", + "ts-mixer": "^6.0.3" }, "peerDependencies": { "o1js": "^1.6.0", diff --git a/packages/common/src/index.ts b/packages/common/src/index.ts index 1973cccdb..cec176fcd 100644 --- a/packages/common/src/index.ts +++ b/packages/common/src/index.ts @@ -13,12 +13,18 @@ export * from "./dependencyFactory/injectOptional"; export * from "./log"; export * from "./events/EventEmittingComponent"; export * from "./events/EventEmitter"; -export * from 
"./trees/MerkleTreeStore"; -export * from "./trees/InMemoryMerkleTreeStorage"; -export * from "./trees/RollupMerkleTree"; +export * from "./trees/sparse/MerkleTreeStore"; +export * from "./trees/sparse/InMemoryMerkleTreeStorage"; +export * from "./trees/sparse/RollupMerkleTree"; +export * from "./trees/lmt/LinkedLeafStore"; +export * from "./trees/lmt/LinkedMerkleTree"; +export * from "./trees/lmt/InMemoryLinkedLeafStore"; +export * from "./trees/lmt/LinkedMerkleTreeCircuitOps"; +export * from "./trees/lmt/AbstractLinkedMerkleTree"; +export * from "./trees/lmt/LinkedMerkleTreeTypes"; export * from "./events/EventEmitterProxy"; export * from "./events/ReplayingSingleUseEventEmitter"; -export * from "./trees/MockAsyncMerkleStore"; +export * from "./trees/sparse/MockAsyncMerkleStore"; export * from "./compiling/AtomicCompileHelper"; export * from "./compiling/CompileRegistry"; export * from "./compiling/CompilableModule"; diff --git a/packages/common/src/trees/VirtualMerkleTreeStore.ts b/packages/common/src/trees/VirtualMerkleTreeStore.ts deleted file mode 100644 index a0cbd3127..000000000 --- a/packages/common/src/trees/VirtualMerkleTreeStore.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { MerkleTreeStore } from "./MerkleTreeStore"; -import { InMemoryMerkleTreeStorage } from "./InMemoryMerkleTreeStorage"; - -/** - * A MemoryMerkleTreeStore that, if falls back to a parent store if it - * has no data - */ -export class VirtualMerkleTreeStore extends InMemoryMerkleTreeStorage { - public constructor(private readonly parent: MerkleTreeStore) { - super(); - } - - public getNode(key: bigint, level: number): bigint | undefined { - return super.getNode(key, level) ?? 
this.parent.getNode(key, level); - } - - public setNode(key: bigint, level: number, value: bigint): void { - super.setNode(key, level, value); - } -} diff --git a/packages/common/src/trees/lmt/AbstractLinkedMerkleTree.ts b/packages/common/src/trees/lmt/AbstractLinkedMerkleTree.ts new file mode 100644 index 000000000..795dc5129 --- /dev/null +++ b/packages/common/src/trees/lmt/AbstractLinkedMerkleTree.ts @@ -0,0 +1,100 @@ +import { Bool, Field, Struct } from "o1js"; + +import { createMerkleTree, RollupMerkleTree } from "../sparse/RollupMerkleTree"; +import { TypedClass } from "../../types"; +import { MerkleTreeStore } from "../sparse/MerkleTreeStore"; + +import { LinkedLeafStore } from "./LinkedLeafStore"; +import { LinkedLeafStruct } from "./LinkedMerkleTreeTypes"; + +class RollupMerkleTreeWitness extends createMerkleTree(40).WITNESS {} + +type LinkedMerkleWitnessValue = { + leaf: LinkedLeafStruct; + merkleWitness: RollupMerkleTreeWitness; + checkMembership(root: Field, path: Field, value: Field): Bool; +}; + +// We use the RollupMerkleTreeWitness here, although we will actually implement +// the RollupMerkleTreeWitnessV2 defined below when instantiating the class. +class LinkedMerkleWitnessTemplate extends Struct({ + leaf: LinkedLeafStruct, + merkleWitness: RollupMerkleTreeWitness, +}) { + public checkMembership(root: Field, path: Field, value: Field): Bool { + // Mock implementation for typing + return Bool(true); + } +} + +type LinkedOperationWitnessValue = { + leafPrevious: LinkedMerkleWitnessValue; + leafCurrent: LinkedMerkleWitnessValue; +}; + +class LinkedOperationWitnessTemplate extends Struct({ + leafPrevious: LinkedMerkleWitnessTemplate, + leafCurrent: LinkedMerkleWitnessTemplate, +}) {} + +export interface AbstractLinkedMerkleTree { + leafStore: LinkedLeafStore; + + tree: RollupMerkleTree; + + /** + * Returns the root of the [Merkle Tree](https://en.wikipedia.org/wiki/Merkle_tree). + * @returns The root of the Merkle Tree. 
+ */ + getRoot(): Field; + + /** + * Sets the value of a leaf node at a given index to a given value. + * @param path of the leaf node. + * @param value New value. + */ + setLeaf(path: bigint, value?: bigint): LinkedOperationWitnessValue; + + /** + * Returns a leaf which lives at a given path. + * Errors otherwise. + * @param path Index of the node. + * @returns The data of the leaf. + */ + getLeaf(path: bigint): LinkedLeafStruct | undefined; + + /** + * Returns the witness (also known as + * [Merkle Proof or Merkle Witness](https://computersciencewiki.org/index.php/Merkle_proof)) + * for the leaf at the given path. + * @param path Position of the leaf node. + * @returns The witness that belongs to the leaf. + */ + getReadWitness(path: bigint): LinkedMerkleWitnessValue; +} + +export interface AbstractLinkedMerkleTreeClass { + new ( + store: MerkleTreeStore, + leafStore: LinkedLeafStore + ): AbstractLinkedMerkleTree; + + WITNESS: typeof LinkedOperationWitnessTemplate & { + fromReadWitness( + readWitness: LinkedMerkleWitnessTemplate + ): LinkedOperationWitnessTemplate; + }; + + READ_WITNESS: typeof LinkedMerkleWitnessTemplate & + TypedClass<{ + checkMembership(root: Field, path: Field, value: Field): Bool; + }>; + + HEIGHT: number; + + EMPTY_ROOT: Field; + + dummyWitness(): LinkedOperationWitnessValue; + + dummyReadWitness(): LinkedMerkleWitnessValue; +} diff --git a/packages/common/src/trees/lmt/InMemoryLinkedLeafStore.ts b/packages/common/src/trees/lmt/InMemoryLinkedLeafStore.ts new file mode 100644 index 000000000..fa395f8ed --- /dev/null +++ b/packages/common/src/trees/lmt/InMemoryLinkedLeafStore.ts @@ -0,0 +1,42 @@ +import { LinkedLeafStore, LinkedLeaf } from "./LinkedLeafStore"; + +export class InMemoryLinkedLeafStore implements LinkedLeafStore { + public leaves: { + [key: string]: { leaf: LinkedLeaf; index: bigint }; + } = {}; + + public maximumIndex?: bigint; + + public getLeaf( + path: bigint + ): { leaf: LinkedLeaf; index: bigint } | undefined { + return 
this.leaves[path.toString()]; + } + + public setLeaf(index: bigint, value: LinkedLeaf): void { + const leaf = this.getLeaf(value.path); + + if (leaf !== undefined && leaf?.index !== index) { + throw new Error("Cannot change index of existing leaf"); + } + + this.leaves[value.path.toString()] = { leaf: value, index: index }; + if (index > (this.maximumIndex ?? -1)) { + this.maximumIndex = index; + } + } + + public getMaximumIndex(): bigint | undefined { + return this.maximumIndex; + } + + // This gets the leaf with the closest path. + public getPreviousLeaf( + path: bigint + ): { leaf: LinkedLeaf; index: bigint } | undefined { + return Object.values(this.leaves).find( + (storedLeaf) => + storedLeaf.leaf.nextPath > path && storedLeaf.leaf.path < path + ); + } +} diff --git a/packages/common/src/trees/lmt/LinkedLeafStore.ts b/packages/common/src/trees/lmt/LinkedLeafStore.ts new file mode 100644 index 000000000..a9c15ff5f --- /dev/null +++ b/packages/common/src/trees/lmt/LinkedLeafStore.ts @@ -0,0 +1,13 @@ +export type LinkedLeaf = { value: bigint; path: bigint; nextPath: bigint }; + +export type StoredLeaf = { leaf: LinkedLeaf; index: bigint }; + +export interface LinkedLeafStore { + setLeaf: (index: bigint, value: LinkedLeaf) => void; + + getLeaf: (path: bigint) => StoredLeaf | undefined; + + getPreviousLeaf: (path: bigint) => StoredLeaf | undefined; + + getMaximumIndex: () => bigint | undefined; +} diff --git a/packages/common/src/trees/lmt/LinkedMerkleTree.ts b/packages/common/src/trees/lmt/LinkedMerkleTree.ts new file mode 100644 index 000000000..213d5ae55 --- /dev/null +++ b/packages/common/src/trees/lmt/LinkedMerkleTree.ts @@ -0,0 +1,272 @@ +import { Field, Struct } from "o1js"; + +import { createMerkleTree, RollupMerkleTree } from "../sparse/RollupMerkleTree"; +import { MerkleTreeStore } from "../sparse/MerkleTreeStore"; +import { InMemoryMerkleTreeStorage } from "../sparse/InMemoryMerkleTreeStorage"; + +import { InMemoryLinkedLeafStore } from 
"./InMemoryLinkedLeafStore"; +import { LinkedLeaf, LinkedLeafStore } from "./LinkedLeafStore"; +import { LinkedLeafStruct } from "./LinkedMerkleTreeTypes"; +import { + AbstractLinkedMerkleTree, + AbstractLinkedMerkleTreeClass, +} from "./AbstractLinkedMerkleTree"; + +export function createLinkedMerkleTree( + height: number +): AbstractLinkedMerkleTreeClass { + const SparseTreeClass = createMerkleTree(height); + + class LinkedLeafAndMerkleWitness extends Struct({ + leaf: LinkedLeafStruct, + merkleWitness: SparseTreeClass.WITNESS, + }) { + public checkMembership(root: Field, path: Field, value: Field) { + const pathEquals = path.equals(this.leaf.path); + + return this.merkleWitness + .calculateRoot( + new LinkedLeafStruct({ + ...this.leaf, + value, + }).hash() + ) + .equals(root) + .and(pathEquals); + } + } + + class LinkedOperationWitness extends Struct({ + leafPrevious: LinkedLeafAndMerkleWitness, + leafCurrent: LinkedLeafAndMerkleWitness, + }) { + // implements LinkedStructTemplate + public static fromReadWitness(readWitness: LinkedLeafAndMerkleWitness) { + return new LinkedOperationWitness({ + leafPrevious: new LinkedLeafAndMerkleWitness({ + merkleWitness: SparseTreeClass.WITNESS.dummy(), + leaf: LinkedLeafStruct.dummy(), + }), + leafCurrent: readWitness, + }); + } + } + + return class AbstractLinkedRollupMerkleTree + implements AbstractLinkedMerkleTree + { + public static HEIGHT = height; + + public static EMPTY_ROOT = new AbstractLinkedRollupMerkleTree( + new InMemoryMerkleTreeStorage(), + new InMemoryLinkedLeafStore() + ).getRoot(); + + public static READ_WITNESS = LinkedLeafAndMerkleWitness; + + public static WITNESS = LinkedOperationWitness; + + readonly tree: RollupMerkleTree; + + readonly leafStore: LinkedLeafStore; + + public constructor(store: MerkleTreeStore, leafStore: LinkedLeafStore) { + this.leafStore = leafStore; + + this.tree = new SparseTreeClass(store); + + // We only do the leaf initialisation when the store + // has no values. 
Otherwise, we leave the store + // as is to not overwrite any data. + if (this.leafStore.getLeaf(0n) === undefined) { + this.setLeafInitialisation(); + } + } + + /** + * Returns leaf which lives at a given path. + * Errors if the path is not defined. + * @param path path of the node. + * @returns The data of the node. + */ + public getLeaf(path: bigint): LinkedLeafStruct | undefined { + const storedLeaf = this.leafStore.getLeaf(path); + if (storedLeaf === undefined) { + return undefined; + } + return new LinkedLeafStruct({ + value: Field(storedLeaf.leaf.value), + path: Field(storedLeaf.leaf.path), + nextPath: Field(storedLeaf.leaf.nextPath), + }); + } + + /** + * Returns the root of the [Merkle Tree](https://en.wikipedia.org/wiki/Merkle_tree). + * @returns The root of the Merkle Tree. + */ + public getRoot(): Field { + return this.tree.getRoot().toConstant(); + } + + private setMerkleLeaf(index: bigint, leaf: LinkedLeaf) { + this.leafStore.setLeaf(index, leaf); + + const leafHash = new LinkedLeafStruct( + LinkedLeafStruct.fromValue(leaf) + ).hash(); + this.tree.setLeaf(index, leafHash); + } + + /** + * Sets the value of a node at a given index to a given value. + * @param path Position of the leaf node. + * @param value New value. + */ + public setLeaf(path: bigint, value: bigint): LinkedOperationWitness { + const storedLeaf = this.leafStore.getLeaf(path); + + if (storedLeaf === undefined) { + // Insert case + // The above means the path doesn't already exist, and we are inserting, not updating. + // This requires us to update the node with the previous path, as well. 
+ const tempIndex = this.leafStore.getMaximumIndex(); + if (tempIndex === undefined) { + throw Error("Store Max Index not defined"); + } + if (tempIndex + 1n >= 2 ** height) { + throw new Error("Index greater than maximum leaf number"); + } + const nextFreeIndex = tempIndex + 1n; + + const previousLeaf = this.leafStore.getPreviousLeaf(path); + + if (previousLeaf === undefined) { + throw Error(`Prev leaf shouldn't be undefined (path ${path})`); + } + + const previousLeafMerkleWitness = this.tree.getWitness( + previousLeaf.index + ); + + const newPrevLeaf = { + ...previousLeaf.leaf, + nextPath: path, + }; + this.setMerkleLeaf(previousLeaf.index, newPrevLeaf); + + const currentMerkleWitness = this.tree.getWitness(nextFreeIndex); + + const newLeaf = { + path, + value, + nextPath: previousLeaf.leaf.nextPath, + }; + this.setMerkleLeaf(nextFreeIndex, newLeaf); + + return new LinkedOperationWitness({ + leafPrevious: new LinkedLeafAndMerkleWitness({ + leaf: new LinkedLeafStruct( + LinkedLeafStruct.fromValue(previousLeaf.leaf) + ), + merkleWitness: previousLeafMerkleWitness, + }), + leafCurrent: new LinkedLeafAndMerkleWitness({ + leaf: LinkedLeafStruct.dummy(), + merkleWitness: currentMerkleWitness, + }), + }); + } else { + // Update case + const witnessPrevious = + AbstractLinkedRollupMerkleTree.dummyReadWitness(); + + // TODO This makes an unnecessary leafstore lookup currently, reuse storedLeaf instead + const current = this.getReadWitness(storedLeaf.leaf.path); + + this.setMerkleLeaf(storedLeaf.index, { + ...storedLeaf.leaf, + value: value, + }); + + return new LinkedOperationWitness({ + leafPrevious: witnessPrevious, + leafCurrent: current, + }); + } + } + + /** + * Sets the value of a leaf node at initialisation, + * i.e. 
{vale: 0, path: 0, nextPath: Field.Max} + */ + private setLeafInitialisation() { + // This is the maximum value of the hash + const MAX_FIELD_VALUE: bigint = Field.ORDER - 1n; + this.leafStore.setLeaf(0n, { + value: 0n, + path: 0n, + nextPath: MAX_FIELD_VALUE, + }); + // We now set the leafs in the merkle tree to cascade the values up + // the tree. + this.setMerkleLeaf(0n, { + value: 0n, + path: 0n, + nextPath: MAX_FIELD_VALUE, + }); + } + + /** + * Returns the witness (also known as + * [Merkle Proof or Merkle Witness](https://computersciencewiki.org/index.php/Merkle_proof)) + * for the leaf at the given path, otherwise returns a witness for the first unused index. + * @param path of the leaf node. + * @returns The witness that belongs to the leaf. + */ + public getReadWitness(path: bigint): LinkedLeafAndMerkleWitness { + const storedLeaf = this.leafStore.getLeaf(path); + let leaf; + let currentIndex: bigint; + + if (storedLeaf === undefined) { + const storeIndex = this.leafStore.getMaximumIndex(); + if (storeIndex === undefined) { + throw new Error("Store undefined"); + } + currentIndex = storeIndex + 1n; + leaf = LinkedLeafStruct.dummy(); + } else { + leaf = new LinkedLeafStruct( + LinkedLeafStruct.fromValue(storedLeaf.leaf) + ); + currentIndex = storedLeaf.index; + } + + const merkleWitness = this.tree.getWitness(currentIndex); + + return new LinkedLeafAndMerkleWitness({ + merkleWitness, + leaf, + }); + } + + public static dummyReadWitness(): LinkedLeafAndMerkleWitness { + return new LinkedLeafAndMerkleWitness({ + merkleWitness: SparseTreeClass.WITNESS.dummy(), + leaf: LinkedLeafStruct.dummy(), + }); + } + + public static dummyWitness() { + return new LinkedOperationWitness({ + leafPrevious: AbstractLinkedRollupMerkleTree.dummyReadWitness(), + leafCurrent: AbstractLinkedRollupMerkleTree.dummyReadWitness(), + }); + } + }; +} + +export class LinkedMerkleTree extends createLinkedMerkleTree(40) {} +export class LinkedMerkleTreeWitness extends 
LinkedMerkleTree.WITNESS {} +export class LinkedMerkleTreeReadWitness extends LinkedMerkleTree.READ_WITNESS {} diff --git a/packages/common/src/trees/lmt/LinkedMerkleTreeCircuitOps.ts b/packages/common/src/trees/lmt/LinkedMerkleTreeCircuitOps.ts new file mode 100644 index 000000000..bfef7568a --- /dev/null +++ b/packages/common/src/trees/lmt/LinkedMerkleTreeCircuitOps.ts @@ -0,0 +1,188 @@ +import { Bool, Field, Provable, Struct } from "o1js"; + +import { LinkedMerkleTreeWitness } from "./LinkedMerkleTree"; +import { LinkedLeafStruct } from "./LinkedMerkleTreeTypes"; + +/* eslint-disable no-inner-declarations */ +// TODO Add a struct that captures the errors monad-style + +export type TreeWrite = { + path: Field; + from: Field; + to: Field; +}; + +export namespace LinkedMerkleTreeCircuitOps { + function boolAllTrue(...args: Bool[]): Bool { + return args.reduce((a, b, i) => { + // if (!b.toBoolean()) { + // console.log(); + // } + return a.and(b); + }); + } + + export class ComputeRootInstruction extends Struct({ + newPreviousLeaf: LinkedLeafStruct, + newCurrentLeaf: LinkedLeafStruct, + allChecksMet: Bool, + }) {} + + function chooseInstruction( + isUpdate: Bool, + updateInstruction: ComputeRootInstruction, + insertInstruction: ComputeRootInstruction + ): ComputeRootInstruction { + return Provable.if( + isUpdate, + ComputeRootInstruction, + updateInstruction, + insertInstruction + ); + } + + /** + * verifyWitness(current.merkleWitness, current.leaf) + * + * st.path == current.leaf.path + * st.from.value == current.leaf.value + * ``` + * + * updates: + * ``` + * current := { current.path, current.nextPath, value: st.to.value } + */ + function update( + { leafCurrent, leafPrevious }: LinkedMerkleTreeWitness, + { to, from, path }: TreeWrite + ): ComputeRootInstruction { + const allChecksMet = boolAllTrue( + path.equals(leafCurrent.leaf.path), + leafCurrent.leaf.value.equals(from) + ); + + return { + newPreviousLeaf: leafPrevious.leaf, + newCurrentLeaf: new 
LinkedLeafStruct({ + ...leafCurrent.leaf, + value: to, + }), + allChecksMet, + }; + } + + /** + * st.path == current.leaf.path + * + * previous.leaf.nextPath > current.leaf.path + * previous.leaf.path < current.leaf.path + * previous.leaf.nextPath == current.leaf.nextPath + * + * index(current.merkleWitness) == nextFreeIndex + * + * updates: + * previous := { previous.path, previous.value, nextPath: current.path } + * current := current.leaf + */ + function insert( + witness: LinkedMerkleTreeWitness, + { path, to }: TreeWrite + ): ComputeRootInstruction { + const { leafPrevious: previous, leafCurrent: current } = witness; + + const allChecksMet = boolAllTrue( + // Already covered in general checks + // path.equals(current.leaf.path), + current.leaf.isDummy(), + previous.leaf.nextPath.greaterThan(path), + previous.leaf.path.lessThan(path) + ); + + return { + newPreviousLeaf: new LinkedLeafStruct({ + ...previous.leaf, + nextPath: path, + }), + newCurrentLeaf: new LinkedLeafStruct({ + path, + value: to, + nextPath: previous.leaf.nextPath, + }), + allChecksMet, + }; + } + + function computeRoot( + witness: LinkedMerkleTreeWitness, + newPreviousLeaf: LinkedLeafStruct, + newCurrentLeaf: LinkedLeafStruct, + isUpdate: Bool, + isDummy: Bool, + root: Field + ) { + const { leafPrevious, leafCurrent } = witness; + + leafPrevious.merkleWitness + .calculateRoot(leafPrevious.leaf.hash()) + .equals(root) + .or(isUpdate) + .assertTrue("Previous leaf calculation not matching"); + + const root1 = leafPrevious.merkleWitness.calculateRoot( + newPreviousLeaf.hash() + ); + + const intermediateRoot = Provable.if(isUpdate, root, root1); + + // TODO Make this Provable.if more efficient + const leafCurrentLeaf = Provable.if( + isUpdate, + leafCurrent.leaf.hash(), + Field(0) + ); + leafCurrent.merkleWitness + .calculateRoot(leafCurrentLeaf) + .equals(intermediateRoot) + .or(isDummy) + .assertTrue("Current leaf witness invalid"); + + return 
leafCurrent.merkleWitness.calculateRoot(newCurrentLeaf.hash()); + } + + export function applyTreeWrite( + root: Field, + witness: LinkedMerkleTreeWitness, + treeWrite: TreeWrite, + index: number + ): Field { + const { leafPrevious, leafCurrent } = witness; + + const isUpdate = leafPrevious.leaf.isDummy(); + const isDummy = leafCurrent.leaf.isDummy().and(isUpdate); + + // For read-only and update + const updateState = update(witness, treeWrite); + + // For insert + const insertState = insert(witness, treeWrite); + + const instruction = chooseInstruction(isUpdate, updateState, insertState); + + instruction.allChecksMet + .or(isDummy) + .assertTrue(`Not all witness checks have been met: ${index}`); + + const newRoot = computeRoot( + witness, + instruction.newPreviousLeaf, + instruction.newCurrentLeaf, + isUpdate, + isDummy, + root + ); + + return Provable.if(isDummy, root, newRoot); + } +} + +/* eslint-enable no-inner-declarations */ diff --git a/packages/common/src/trees/lmt/LinkedMerkleTreeTypes.ts b/packages/common/src/trees/lmt/LinkedMerkleTreeTypes.ts new file mode 100644 index 000000000..ecd1ec970 --- /dev/null +++ b/packages/common/src/trees/lmt/LinkedMerkleTreeTypes.ts @@ -0,0 +1,53 @@ +import { Field, Poseidon, Provable, Struct } from "o1js"; + +export class LinkedLeafStruct extends Struct({ + value: Field, + path: Field, + nextPath: Field, +}) { + public isDummy() { + return this.path.equals(0).and(this.nextPath.equals(0)); + } + + public hash(): Field { + const hash = Poseidon.hash(LinkedLeafStruct.toFields(this)); + return Provable.if(this.isDummy(), Field(0), hash); + } + + public static dummy(): LinkedLeafStruct { + return new LinkedLeafStruct({ + value: Field(0), + path: Field(0), + nextPath: Field(0), + }); + } +} + +// export class LinkedMerkleTreeGlobalState extends Struct({ +// root: Field, +// lastOccupiedIndex: Field, +// }) { +// public static equals( +// state1: LinkedMerkleTreeGlobalState, +// state2: LinkedMerkleTreeGlobalState +// ): Bool { 
+// return state1.root +// .equals(state2.root) +// .and(state1.lastOccupiedIndex.equals(state2.lastOccupiedIndex)); +// } +// +// public static assertEquals( +// state1: LinkedMerkleTreeGlobalState, +// state2: LinkedMerkleTreeGlobalState, +// msg?: string +// ) { +// state1.root.assertEquals( +// state2.root, +// msg !== undefined ? `${msg}: root` : msg +// ); +// state1.lastOccupiedIndex.assertEquals( +// state2.lastOccupiedIndex, +// msg !== undefined ? `${msg}: lastOccupiedIndex` : msg +// ); +// } +// } diff --git a/packages/common/src/trees/InMemoryMerkleTreeStorage.ts b/packages/common/src/trees/sparse/InMemoryMerkleTreeStorage.ts similarity index 95% rename from packages/common/src/trees/InMemoryMerkleTreeStorage.ts rename to packages/common/src/trees/sparse/InMemoryMerkleTreeStorage.ts index 390b87fc8..c042c0d3a 100644 --- a/packages/common/src/trees/InMemoryMerkleTreeStorage.ts +++ b/packages/common/src/trees/sparse/InMemoryMerkleTreeStorage.ts @@ -1,7 +1,7 @@ import { MerkleTreeStore } from "./MerkleTreeStore"; export class InMemoryMerkleTreeStorage implements MerkleTreeStore { - protected nodes: { + public nodes: { [key: number]: { [key: string]: bigint; }; diff --git a/packages/common/src/trees/MerkleTreeStore.ts b/packages/common/src/trees/sparse/MerkleTreeStore.ts similarity index 100% rename from packages/common/src/trees/MerkleTreeStore.ts rename to packages/common/src/trees/sparse/MerkleTreeStore.ts diff --git a/packages/common/src/trees/MockAsyncMerkleStore.ts b/packages/common/src/trees/sparse/MockAsyncMerkleStore.ts similarity index 94% rename from packages/common/src/trees/MockAsyncMerkleStore.ts rename to packages/common/src/trees/sparse/MockAsyncMerkleStore.ts index 26279aea9..355be3b28 100644 --- a/packages/common/src/trees/MockAsyncMerkleStore.ts +++ b/packages/common/src/trees/sparse/MockAsyncMerkleStore.ts @@ -1,4 +1,4 @@ -import { noop } from "../utils"; +import { noop } from "../../utils"; import { InMemoryMerkleTreeStorage } from 
"./InMemoryMerkleTreeStorage"; diff --git a/packages/common/src/trees/RollupMerkleTree.ts b/packages/common/src/trees/sparse/RollupMerkleTree.ts similarity index 93% rename from packages/common/src/trees/RollupMerkleTree.ts rename to packages/common/src/trees/sparse/RollupMerkleTree.ts index 5257e3226..e5673ec95 100644 --- a/packages/common/src/trees/RollupMerkleTree.ts +++ b/packages/common/src/trees/sparse/RollupMerkleTree.ts @@ -1,12 +1,12 @@ import { Bool, Field, Poseidon, Provable, Struct } from "o1js"; -import { range } from "../utils"; -import { TypedClass } from "../types"; +import { range } from "../../utils"; +import { TypedClass } from "../../types"; import { MerkleTreeStore } from "./MerkleTreeStore"; import { InMemoryMerkleTreeStorage } from "./InMemoryMerkleTreeStorage"; -class StructTemplate extends Struct({ +export class StructTemplate extends Struct({ path: Provable.Array(Field, 0), isLeft: Provable.Array(Bool, 0), }) {} @@ -16,7 +16,7 @@ export interface AbstractMerkleWitness extends StructTemplate { /** * Calculates a root depending on the leaf value. - * @param leaf Value of the leaf node that belongs to this Witness. + * @param hash Value of the leaf node that belongs to this Witness. * @returns The calculated root. */ calculateRoot(hash: Field): Field; @@ -29,6 +29,8 @@ export interface AbstractMerkleWitness extends StructTemplate { checkMembership(root: Field, key: Field, value: Field): Bool; + checkMembershipSimple(root: Field, value: Field): Bool; + checkMembershipGetRoots( root: Field, key: Field, @@ -115,7 +117,7 @@ export interface AbstractMerkleTreeClass { */ export function createMerkleTree(height: number): AbstractMerkleTreeClass { /** - * The {@link BaseMerkleWitness} class defines a circuit-compatible base class + * The {@link RollupMerkleWitness} class defines a circuit-compatible base class * for [Merkle Witness'](https://computersciencewiki.org/index.php/Merkle_proof). 
*/ class RollupMerkleWitness @@ -176,6 +178,11 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { return root.equals(calculatedRoot); } + public checkMembershipSimple(root: Field, value: Field): Bool { + const calculatedRoot = this.calculateRoot(value); + return root.equals(calculatedRoot); + } + public checkMembershipGetRoots( root: Field, key: Field, @@ -200,12 +207,11 @@ export function createMerkleTree(height: number): AbstractMerkleTreeClass { public static dummy() { return new RollupMerkleWitness({ - isLeft: Array(height - 1).fill(Bool(true)), - path: Array(height - 1).fill(Field(0)), + isLeft: Array(this.height - 1).fill(Bool(true)), + path: Array(this.height - 1).fill(Field(0)), }); } } - return class AbstractRollupMerkleTree implements AbstractMerkleTree { public static HEIGHT = height; @@ -349,7 +355,7 @@ export class RollupMerkleTreeWitness extends RollupMerkleTree.WITNESS {} * More efficient version of `maybeSwapBad` which * reuses an intermediate variable */ -function maybeSwap(b: Bool, x: Field, y: Field): [Field, Field] { +export function maybeSwap(b: Bool, x: Field, y: Field): [Field, Field] { const m = b.toField().mul(x.sub(y)); // b*(x - y) const x1 = y.add(m); // y + b*(x - y) const y2 = x.sub(m); // x - b*(x - y) = x + b*(y - x) diff --git a/packages/common/src/utils.ts b/packages/common/src/utils.ts index 316be8184..0576aacb5 100644 --- a/packages/common/src/utils.ts +++ b/packages/common/src/utils.ts @@ -280,3 +280,18 @@ class ReferenceObject { export function createReference(initial: T): Reference { return new ReferenceObject(initial); } + +export namespace BigIntMath { + export function max(...args: bigint[]) { + return args.reduce((m, e) => (e > m ? e : m)); + } +} + +export function assertDefined( + t: T | undefined, + msg?: string +): asserts t is T { + if (t === undefined) { + throw new Error(msg ?? 
"Value is undefined"); + } +} diff --git a/packages/common/test/trees/LinkedMerkleTree.test.ts b/packages/common/test/trees/LinkedMerkleTree.test.ts new file mode 100644 index 000000000..440fe4caa --- /dev/null +++ b/packages/common/test/trees/LinkedMerkleTree.test.ts @@ -0,0 +1,124 @@ +import { beforeEach } from "@jest/globals"; +import { Field, Poseidon } from "o1js"; + +import { + createLinkedMerkleTree, + InMemoryLinkedLeafStore, + InMemoryMerkleTreeStorage, + log, +} from "../../src"; +import { expectDefined } from "../../dist/utils"; + +describe.each([4, 16, 254])("cachedMerkleTree - %s", (height) => { + class LinkedMerkleTree extends createLinkedMerkleTree(height) {} + + let leafStore: InMemoryLinkedLeafStore; + let merkleStore: InMemoryMerkleTreeStorage; + let tree: LinkedMerkleTree; + + beforeEach(() => { + log.setLevel("INFO"); + + leafStore = new InMemoryLinkedLeafStore(); + merkleStore = new InMemoryMerkleTreeStorage(); + tree = new LinkedMerkleTree(merkleStore, leafStore); + }); + + it("should have the same root when empty", () => { + expect.assertions(2); + + expect(tree.getRoot().toString()).toStrictEqual( + LinkedMerkleTree.EMPTY_ROOT.toString() + ); + expectDefined(tree.getLeaf(0n)); + }); + + it("should have a different root when not empty", () => { + expect.assertions(1); + + tree.setLeaf(1n, 1n); + + expect(tree.getRoot().toString()).not.toStrictEqual( + LinkedMerkleTree.EMPTY_ROOT.toString() + ); + }); + + it("should provide correct witnesses", () => { + expect.assertions(2); + + tree.setLeaf(1n, 1n); + tree.setLeaf(5n, 5n); + + const witness = tree.getReadWitness(5n); + + expect(witness.leaf.value.toString()).toStrictEqual("5"); + expect( + witness.merkleWitness.calculateRoot(witness.leaf.hash()).toString() + ).toStrictEqual(tree.getRoot().toString()); + }); + + it("should have invalid witnesses with wrong values", () => { + expect.assertions(1); + + tree.setLeaf(1n, 1n); + tree.setLeaf(5n, 5n); + + const witness = tree.getReadWitness(5n); + + 
expect( + witness.merkleWitness.calculateRoot(Field(6)).toString() + ).not.toStrictEqual(tree.getRoot().toString()); + }); + + it("should have valid witnesses with changed value on the same leafs", () => { + expect.assertions(1); + + tree.setLeaf(1n, 1n); + tree.setLeaf(5n, 5n); + + const witness = tree.getReadWitness(5n); + + tree.setLeaf(5n, 10n); + + expect( + witness.merkleWitness + .calculateRoot( + Poseidon.hash([Field(10), witness.leaf.path, witness.leaf.nextPath]) + ) + .toString() + ).toStrictEqual(tree.getRoot().toString()); + }); + + it("should return zeroNode", () => { + expect.assertions(4); + const MAX_FIELD_VALUE: bigint = Field.ORDER - 1n; + const zeroLeaf = tree.getLeaf(0n); + expectDefined(zeroLeaf); + expect(zeroLeaf.value.toString()).toStrictEqual("0"); + expect(zeroLeaf.path.toString()).toStrictEqual("0"); + expect(zeroLeaf.nextPath.toString()).toStrictEqual( + MAX_FIELD_VALUE.toString() + ); + }); +}); + +// Separate describe here since we only want small trees for this test. 
+describe("Error check", () => { + class LinkedMerkleTree extends createLinkedMerkleTree(4) {} + let leafStore: InMemoryLinkedLeafStore; + let merkleStore: InMemoryMerkleTreeStorage; + let tree: LinkedMerkleTree; + + it("throw for invalid index", () => { + log.setLevel("INFO"); + + leafStore = new InMemoryLinkedLeafStore(); + merkleStore = new InMemoryMerkleTreeStorage(); + tree = new LinkedMerkleTree(merkleStore, leafStore); + expect(() => { + for (let i = 0; i < 2n ** BigInt(4) + 1n; i++) { + tree.setLeaf(BigInt(i), 2n); + } + }).toThrow("Index greater than maximum leaf number"); + }); +}); diff --git a/packages/common/test/trees/LinkedMerkleTreeCircuitOps.test.ts b/packages/common/test/trees/LinkedMerkleTreeCircuitOps.test.ts new file mode 100644 index 000000000..cce9a2707 --- /dev/null +++ b/packages/common/test/trees/LinkedMerkleTreeCircuitOps.test.ts @@ -0,0 +1,147 @@ +import { Field, Provable } from "o1js"; + +import { + InMemoryLinkedLeafStore, + InMemoryMerkleTreeStorage, + LinkedMerkleTree, + LinkedMerkleTreeCircuitOps, + LinkedMerkleTreeWitness, +} from "../../src"; + +describe("LinkedMerkleTree - Circuit Ops", () => { + function setupTree() { + const leafStore = new InMemoryLinkedLeafStore(); + const store = new InMemoryMerkleTreeStorage(); + return new LinkedMerkleTree(store, leafStore); + } + + let tree: LinkedMerkleTree; + + beforeEach(() => { + tree = setupTree(); + }); + + it("should correctly verify insert witness", () => { + try { + const root = tree.getRoot(); + const insertWitness = tree.setLeaf(5n, 1000n); + + const globalState = LinkedMerkleTreeCircuitOps.applyTreeWrite( + root, + insertWitness, + { + path: Field(5), + from: Field(0), + to: Field(1000), + }, + 0 + ); + + expect(globalState.toString()).toStrictEqual(tree.getRoot().toString()); + } catch (e) { + console.error(e); + throw e; + } + }); + + it("should correctly verify update witness", () => { + try { + tree.setLeaf(5n, 1000n); + tree.setLeaf(10n, 1500n); + + const root = 
tree.getRoot(); + + const updateWitness = tree.setLeaf(10n, 500n); + + const globalState = LinkedMerkleTreeCircuitOps.applyTreeWrite( + root, + updateWitness, + { + path: Field(10), + from: Field(1500), + to: Field(500), + }, + 0 + ); + + expect(globalState.toString()).toStrictEqual(tree.getRoot().toString()); + } catch (e) { + console.error(e); + throw e; + } + }); + + it("should not update root when only reading", () => { + tree.setLeaf(5n, 1000n); + tree.setLeaf(10n, 1500n); + + const root = tree.getRoot(); + + const updateWitness = tree.getReadWitness(10n); + + const globalState = LinkedMerkleTreeCircuitOps.applyTreeWrite( + root, + LinkedMerkleTreeWitness.fromReadWitness(updateWitness), + { + path: Field(10), + from: Field(1500), + to: Field(1500), + }, + 0 + ); + + expect(globalState.toString()).toStrictEqual(root.toString()); + expect(globalState.toString()).toStrictEqual(tree.getRoot().toString()); + }); + + it("should noop when used with a dummy witness", () => { + tree.setLeaf(5n, 1000n); + tree.setLeaf(10n, 1500n); + + const root = tree.getRoot(); + + const globalState = LinkedMerkleTreeCircuitOps.applyTreeWrite( + root, + LinkedMerkleTree.dummyWitness(), + { + path: Field(0), + from: Field(0), + to: Field(0), + }, + 0 + ); + + expect(globalState.toString()).toStrictEqual(root.toString()); + expect(globalState.toString()).toStrictEqual(tree.getRoot().toString()); + }); + + it("Circuit size", async () => { + const root = tree.getRoot(); + + const updateWitness = tree.setLeaf(10n, 500n); + + const cs = await Provable.constraintSystem(() => { + const rootWitness = Provable.witness(Field, () => root); + const updateWitnessWitness = Provable.witness( + LinkedMerkleTreeWitness, + () => updateWitness + ); + const treeWrite = { + path: Provable.witness(Field, () => 1), + from: Provable.witness(Field, () => 1), + to: Provable.witness(Field, () => 1), + }; + + LinkedMerkleTreeCircuitOps.applyTreeWrite( + rootWitness, + updateWitnessWitness, + treeWrite, + 0 + ); + 
}); + + console.log(cs.rows); + + expect(cs.rows).toBeLessThan(2500); + }); +}); diff --git a/packages/persistance/prisma/migrations/20250411162042_linked_leaves/migration.sql b/packages/persistance/prisma/migrations/20250411162042_linked_leaves/migration.sql new file mode 100644 index 000000000..3893fbe5e --- /dev/null +++ b/packages/persistance/prisma/migrations/20250411162042_linked_leaves/migration.sql @@ -0,0 +1,25 @@ +-- CreateTable +CREATE TABLE "LinkedLeaf" ( + "index" DECIMAL(78,0) NOT NULL, + "path" DECIMAL(78,0) NOT NULL, + "value" DECIMAL(78,0) NOT NULL, + "nextPath" DECIMAL(78,0) NOT NULL, + "mask" TEXT NOT NULL, + + CONSTRAINT "LinkedLeaf_pkey" PRIMARY KEY ("index","mask") +); + +-- CreateIndex +CREATE INDEX "LinkedLeaf_index_idx" ON "LinkedLeaf"("index"); + +-- CreateIndex +CREATE INDEX "LinkedLeaf_path_idx" ON "LinkedLeaf"("path"); + +-- CreateIndex +CREATE INDEX "LinkedLeaf_nextPath_idx" ON "LinkedLeaf"("nextPath"); + +-- CreateIndex +CREATE INDEX "State_path_idx" ON "State" USING HASH ("path"); + +-- CreateIndex +CREATE INDEX "State_mask_path_idx" ON "State"("mask", "path"); diff --git a/packages/persistance/prisma/schema.prisma b/packages/persistance/prisma/schema.prisma index 58c214477..17aa71d5f 100644 --- a/packages/persistance/prisma/schema.prisma +++ b/packages/persistance/prisma/schema.prisma @@ -19,6 +19,23 @@ model State { mask String @db.VarChar(256) @@id([path, mask]) + // We only need equality for the path + @@index([path], type: Hash) + @@index([mask, path], type: BTree) +} + +model LinkedLeaf { + index Decimal @db.Decimal(78, 0) + path Decimal @db.Decimal(78, 0) + value Decimal @db.Decimal(78, 0) + nextPath Decimal @db.Decimal(78, 0) + mask String + + @@id([index, mask]) + // We need a btree here, because we need to support queries with < and > for finding previous lmt leaves for inserting + @@index([index], type: BTree) + @@index([path], type: BTree) + @@index([nextPath], type: BTree) } model Transaction { diff --git 
a/packages/persistance/src/PrismaDatabaseConnection.ts b/packages/persistance/src/PrismaDatabaseConnection.ts index 234f7977a..84440ca1c 100644 --- a/packages/persistance/src/PrismaDatabaseConnection.ts +++ b/packages/persistance/src/PrismaDatabaseConnection.ts @@ -1,4 +1,4 @@ -import { PrismaClient } from "@prisma/client"; +import { Prisma, PrismaClient } from "@prisma/client"; import { sequencerModule, SequencerModule, @@ -28,6 +28,7 @@ export interface PrismaDatabaseConfig { }; } | string; + log?: (Prisma.LogLevel | Prisma.LogDefinition)[]; } export interface PrismaConnection { @@ -54,7 +55,7 @@ export class PrismaDatabaseConnection public dependencies(): OmitKeys< StorageDependencyMinimumDependencies, - "asyncMerkleStore" | "blockTreeStore" | "unprovenMerkleStore" + "blockTreeStore" | "asyncLinkedLeafStore" | "unprovenLinkedLeafStore" > { return { asyncStateService: { @@ -95,6 +96,7 @@ export class PrismaDatabaseConnection "Settlement", "IncomingMessageBatch", "IncomingMessageBatchTransaction", + "LinkedLeaf", ]; await this.prismaClient.$transaction( @@ -133,6 +135,7 @@ export class PrismaDatabaseConnection url, }, }, + log: this.config.log, }); } else { this.initializedClient = new PrismaClient(); diff --git a/packages/persistance/src/PrismaRedisDatabase.ts b/packages/persistance/src/PrismaRedisDatabase.ts index 92a68d0b5..c13b2ff91 100644 --- a/packages/persistance/src/PrismaRedisDatabase.ts +++ b/packages/persistance/src/PrismaRedisDatabase.ts @@ -22,6 +22,7 @@ import { RedisConnectionModule, RedisTransaction, } from "./RedisConnection"; +import { PrismaLinkedLeafStore } from "./services/prisma/PrismaLinkedLeafStore"; export interface PrismaRedisCombinedConfig { prisma: PrismaDatabaseConfig; @@ -38,7 +39,7 @@ export class PrismaRedisDatabase public redis: RedisConnectionModule; - public constructor(@inject("Tracer") tracer: Tracer) { + public constructor(@inject("Tracer") private readonly tracer: Tracer) { super(); this.prisma = new 
PrismaDatabaseConnection(tracer); this.redis = new RedisConnectionModule(tracer); @@ -66,6 +67,28 @@ export class PrismaRedisDatabase return { ...this.prisma.dependencies(), ...this.redis.dependencies(), + + asyncLinkedLeafStore: { + useFactory: () => { + return new PrismaLinkedLeafStore( + this.prisma, + this.redis, + this.tracer, + "batch" + ); + }, + }, + + unprovenLinkedLeafStore: { + useFactory: () => { + return new PrismaLinkedLeafStore( + this.prisma, + this.redis, + this.tracer, + "block" + ); + }, + }, }; } diff --git a/packages/persistance/src/index.ts b/packages/persistance/src/index.ts index 208211217..788de3d57 100644 --- a/packages/persistance/src/index.ts +++ b/packages/persistance/src/index.ts @@ -15,3 +15,4 @@ export * from "./services/prisma/mappers/StateTransitionMapper"; export * from "./services/prisma/mappers/TransactionMapper"; export * from "./services/prisma/mappers/BlockResultMapper"; export * from "./services/redis/RedisMerkleTreeStore"; +export * from "./services/prisma/PrismaLinkedLeafStore"; diff --git a/packages/persistance/src/services/prisma/PrismaLinkedLeafStore.ts b/packages/persistance/src/services/prisma/PrismaLinkedLeafStore.ts new file mode 100644 index 000000000..1761854af --- /dev/null +++ b/packages/persistance/src/services/prisma/PrismaLinkedLeafStore.ts @@ -0,0 +1,172 @@ +import { noop, StoredLeaf } from "@proto-kit/common"; +import { AsyncLinkedLeafStore, Tracer } from "@proto-kit/sequencer"; +import { injectable } from "tsyringe"; +import { Prisma } from "@prisma/client"; + +import { PrismaConnection } from "../../PrismaDatabaseConnection"; +import { RedisMerkleTreeStore } from "../redis/RedisMerkleTreeStore"; +import { RedisConnection } from "../../RedisConnection"; + +import { Decimal } from "./PrismaStateService"; + +@injectable() +export class PrismaLinkedLeafStore implements AsyncLinkedLeafStore { + private cache: StoredLeaf[] = []; + + private readonly redisMerkleStore: RedisMerkleTreeStore; + + public 
constructor( + private readonly connection: PrismaConnection, + redisConnection: RedisConnection, + tracer: Tracer, + private readonly mask: string = "base" + ) { + this.redisMerkleStore = new RedisMerkleTreeStore( + redisConnection, + tracer, + mask + ); + } + + public get treeStore() { + return this.redisMerkleStore; + } + + private assertCacheEmpty() { + if (this.cache.length > 0) { + throw new Error("For this operation, the cache must be empty"); + } + } + + public async openTransaction(): Promise { + noop(); + } + + public async commit(): Promise { + if (this.cache.length > 0) { + const data = this.cache.map((entry) => ({ + path: entry.leaf.path.toString(), + value: entry.leaf.value.toString(), + nextPath: entry.leaf.nextPath.toString(), + index: entry.index.toString(), + mask: this.mask, + })); + + await this.connection.prismaClient.linkedLeaf.deleteMany({ + where: { + path: { + in: data.map((entry) => entry.path), + }, + mask: this.mask, + }, + }); + + await this.connection.prismaClient.linkedLeaf.createMany({ + data, + skipDuplicates: false, + }); + + this.cache = []; + } + } + + public writeLeaves(leaves: StoredLeaf[]) { + this.cache = this.cache.concat(leaves); + } + + public async getLeavesAsync(paths: bigint[]) { + this.assertCacheEmpty(); + + if (paths.length > 0) { + const pathsDecimal = paths.map((path) => new Decimal(path.toString(10))); + const records = await this.connection.prismaClient.linkedLeaf.findMany({ + where: { + path: { + in: pathsDecimal, + }, + mask: this.mask, + }, + }); + + const stack = records + .map((record) => { + return { + index: BigInt(record.index.toFixed()), + leaf: { + path: BigInt(record.path.toFixed()), + value: BigInt(record.value.toFixed()), + nextPath: BigInt(record.nextPath.toFixed()), + }, + }; + }) + .reverse(); + + // TODO this runs in O(n^2), find a better matching algorithm for this (ordering?) 
+ return paths.map((path) => { + return stack.find((candidate) => candidate.leaf.path === path); + }); + } + return []; + } + + public async getMaximumIndexAsync() { + this.assertCacheEmpty(); + + const result = await this.connection.prismaClient.linkedLeaf.aggregate({ + where: { + mask: this.mask, + }, + _max: { + index: true, + }, + }); + const maximumIndexString = result._max.index?.toFixed(); + return maximumIndexString !== undefined + ? BigInt(maximumIndexString) + : undefined; + } + + public async getPreviousLeavesAsync(paths: bigint[]) { + this.assertCacheEmpty(); + + const pathsDecimals = paths.map((path) => new Decimal(path.toString(10))); + type LinkedLeafQueryResult = { + index: Prisma.Decimal; + path: Prisma.Decimal; + nextPath: Prisma.Decimal; + value: Prisma.Decimal; + mask: string; + }; + const result = await this.connection.prismaClient.$queryRaw< + ({ + query_path: Prisma.Decimal; + } & LinkedLeafQueryResult)[] + >` + SELECT * FROM "LinkedLeaf" l + RIGHT JOIN (SELECT unnest(ARRAY[${pathsDecimals}]) as query_path) f + ON l.path < f.query_path AND l."nextPath" > f.query_path + WHERE l.mask = ${this.mask} + `; + + const map: Record = Object.fromEntries( + result.map((obj) => { + return [obj.query_path.toFixed(), obj]; + }) + ); + + return paths.map((path) => { + const record = map[path.toString()]; + if (record !== undefined) { + return { + index: BigInt(record.index.toFixed()), + leaf: { + path: BigInt(record.path.toFixed()), + value: BigInt(record.value.toFixed()), + nextPath: BigInt(record.nextPath.toFixed()), + }, + }; + } + return undefined; + }); + } +} diff --git a/packages/persistance/src/services/prisma/PrismaStateService.ts b/packages/persistance/src/services/prisma/PrismaStateService.ts index 73881127d..1aed65a38 100644 --- a/packages/persistance/src/services/prisma/PrismaStateService.ts +++ b/packages/persistance/src/services/prisma/PrismaStateService.ts @@ -13,7 +13,7 @@ import type { PrismaConnection } from 
"../../PrismaDatabaseConnection"; // We need to create a correctly configured Decimal constructor // with our parameters -const Decimal = Prisma.Decimal.clone({ +export const Decimal = Prisma.Decimal.clone({ precision: 78, }); @@ -36,25 +36,27 @@ export class PrismaStateService implements AsyncStateService { public async commit(): Promise { const { prismaClient } = this.connection; - const data = this.cache - .filter((entry) => entry.value !== undefined) - .map((entry) => ({ - path: new Decimal(entry.key.toString()), - values: entry.value!.map((field) => new Decimal(field.toString())), - mask: this.mask, - })); + if (this.cache.length > 0) { + const data = this.cache + .filter((entry) => entry.value !== undefined) + .map((entry) => ({ + path: new Decimal(entry.key.toString()), + values: entry.value!.map((field) => new Decimal(field.toString())), + mask: this.mask, + })); - await prismaClient.state.deleteMany({ - where: { - path: { - in: this.cache.map((x) => new Decimal(x.key.toString())), + await prismaClient.state.deleteMany({ + where: { + path: { + in: this.cache.map((x) => new Decimal(x.key.toString())), + }, + mask: this.mask, }, - mask: this.mask, - }, - }); - await prismaClient.state.createMany({ - data, - }); + }); + await prismaClient.state.createMany({ + data, + }); + } this.cache = []; } diff --git a/packages/persistance/test-integration/PrismaBlockProduction.test.ts b/packages/persistance/test-integration/PrismaBlockProduction.test.ts index 61eb02d3c..dcf69c648 100644 --- a/packages/persistance/test-integration/PrismaBlockProduction.test.ts +++ b/packages/persistance/test-integration/PrismaBlockProduction.test.ts @@ -1,16 +1,18 @@ import "reflect-metadata"; import { afterAll, beforeAll, describe, expect } from "@jest/globals"; -import { expectDefined } from "@proto-kit/common"; +import { expectDefined, log } from "@proto-kit/common"; import { BalancesKey, TokenId } from "@proto-kit/library"; import { NetworkState } from "@proto-kit/protocol"; import { 
AppChainTransaction } from "@proto-kit/sdk"; import { Block, Batch } from "@proto-kit/sequencer"; import { PrivateKey, PublicKey } from "o1js"; import { container } from "tsyringe"; +import { testBlockProduction } from "@proto-kit/sequencer/test/integration/BlockProduction-test"; import { PrismaBatchStore, PrismaBlockStorage, + PrismaRedisDatabase, PrismaTransactionStorage, } from "../src"; @@ -20,12 +22,25 @@ import { prepareBlock, } from "./utils"; +describe("Prisma block production", () => { + const { prismaConfig, redisConfig } = IntegrationTestDBConfig; + testBlockProduction(PrismaRedisDatabase, { + prisma: { + connection: prismaConfig, + log: [{ level: "query", emit: "event" }], + }, + redis: redisConfig, + }); +}); + describe("prisma integration", () => { let appChain: ReturnType; const sender = PrivateKey.random(); let senderNonce = 0; + log.setLevel("TRACE"); + const setup = async () => { const { prismaConfig, redisConfig } = IntegrationTestDBConfig; appChain = createPrismaAppchain(prismaConfig, redisConfig); @@ -38,10 +53,6 @@ describe("prisma integration", () => { await appChain.start(false, container.createChildContainer()); - const db = appChain.sequencer.resolve("Database"); - await db.prisma.pruneDatabase(); - await db.redis.pruneDatabase(); - senderNonce = 0; }; diff --git a/packages/persistance/test-integration/SequencerRestart.test.ts b/packages/persistance/test-integration/SequencerRestart.test.ts index 6619ed5ab..940bc40ca 100644 --- a/packages/persistance/test-integration/SequencerRestart.test.ts +++ b/packages/persistance/test-integration/SequencerRestart.test.ts @@ -1,6 +1,6 @@ import "reflect-metadata"; import { afterAll, beforeAll, expect } from "@jest/globals"; -import { expectDefined } from "@proto-kit/common"; +import { expectDefined, log } from "@proto-kit/common"; import { PrivateKey } from "o1js"; import { container } from "tsyringe"; @@ -18,8 +18,7 @@ describe("sequencer restart", () => { const clearDB = async () => { const db = 
appChain.sequencer.resolve("Database"); - await db.prisma.pruneDatabase(); - await db.redis.pruneDatabase(); + await db.pruneDatabase(); }; const setup = async () => { @@ -30,6 +29,11 @@ describe("sequencer restart", () => { Signer: { signer: sender, }, + Sequencer: { + DatabasePruneModule: { + pruneOnStartup: false, + }, + }, }); await appChain.start(false, container.createChildContainer()); @@ -40,6 +44,7 @@ describe("sequencer restart", () => { }; beforeAll(async () => { + log.setLevel("DEBUG"); await setup(); await clearDB(); diff --git a/packages/persistance/test-integration/utils.ts b/packages/persistance/test-integration/utils.ts index e42b336e8..a23be616c 100644 --- a/packages/persistance/test-integration/utils.ts +++ b/packages/persistance/test-integration/utils.ts @@ -32,6 +32,7 @@ import { BlockProducerModule, VanillaTaskWorkerModules, SequencerStartupModule, + DatabasePruneModule, } from "@proto-kit/sequencer"; import { Bool, PrivateKey, PublicKey, Struct } from "o1js"; @@ -103,6 +104,7 @@ export function createPrismaAppchain( }), Sequencer: Sequencer.from({ modules: { + DatabasePruneModule, Database: PrismaRedisDatabase, Mempool: PrivateMempool, @@ -153,6 +155,9 @@ export function createPrismaAppchain( simulatedDuration: 0, }, SequencerStartupModule: {}, + DatabasePruneModule: { + pruneOnStartup: true, + }, }, Signer: { signer: PrivateKey.random(), diff --git a/packages/protocol/src/model/StateTransitionProvableBatch.ts b/packages/protocol/src/model/StateTransitionProvableBatch.ts index c63aeb347..d08ab7ffb 100644 --- a/packages/protocol/src/model/StateTransitionProvableBatch.ts +++ b/packages/protocol/src/model/StateTransitionProvableBatch.ts @@ -1,5 +1,5 @@ import { Bool, Field, Provable, Struct } from "o1js"; -import { batch, RollupMerkleTreeWitness } from "@proto-kit/common"; +import { batch, LinkedMerkleTreeWitness } from "@proto-kit/common"; import { constants } from "../Constants"; @@ -53,7 +53,7 @@ export class ProvableStateTransitionType 
extends Struct({ export class MerkleWitnessBatch extends Struct({ witnesses: Provable.Array( - RollupMerkleTreeWitness, + LinkedMerkleTreeWitness, constants.stateTransitionProverBatchSize ), }) {} diff --git a/packages/protocol/src/prover/block/BlockProver.ts b/packages/protocol/src/prover/block/BlockProver.ts index 6362e7055..e66e34e22 100644 --- a/packages/protocol/src/prover/block/BlockProver.ts +++ b/packages/protocol/src/prover/block/BlockProver.ts @@ -504,13 +504,14 @@ export class BlockProverProgrammable extends ZkProgrammable< "Batcheshash doesn't start at 0" ); - // Assert from state roots + // Assert from state root assertEqualsIf( stateRoot, stateTransitionProof.publicInput.root, apply, errors.propertyNotMatching("from state root") ); + // Assert the stBatchesHash executed is the same assertEqualsIf( pendingSTBatchesHash, diff --git a/packages/protocol/src/prover/statetransition/StateTransitionProver.ts b/packages/protocol/src/prover/statetransition/StateTransitionProver.ts index 6adaf7e76..47932fad7 100644 --- a/packages/protocol/src/prover/statetransition/StateTransitionProver.ts +++ b/packages/protocol/src/prover/statetransition/StateTransitionProver.ts @@ -2,11 +2,13 @@ import { AreProofsEnabled, PlainZkProgram, provableMethod, - RollupMerkleTreeWitness, ZkProgrammable, CompilableModule, type ArtifactRecord, type CompileRegistry, + TreeWrite, + LinkedMerkleTreeCircuitOps, + LinkedMerkleTreeWitness, } from "@proto-kit/common"; import { Field, Provable, SelfProof, ZkProgram } from "o1js"; import { injectable } from "tsyringe"; @@ -143,6 +145,27 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< ]; } + private transitionToTreeWrite( + st: ProvableStateTransition, + witness: LinkedMerkleTreeWitness + ): TreeWrite { + // If from isSome isn't set, the user "ignored the previous value", i.e. 
we + // can assume the value in the witness is correct + const from = Provable.if( + st.from.isSome, + st.from.value, + witness.leafCurrent.leaf.value + ); + + // If the user doesn't want to write, we just carry over the from-value + const to = Provable.if(st.to.isSome, st.to.value, from); + return { + path: st.path, + from, + to, + }; + } + /** * Applies the state transitions to the current stateRoot * and returns the new prover state @@ -248,7 +271,7 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< public applyTransition( currentBatch: AppliedStateTransitionBatchState, transition: ProvableStateTransition, - witness: RollupMerkleTreeWitness, + witness: LinkedMerkleTreeWitness, index = 0 ) { const impliedRoot = this.applyTransitionToRoot( @@ -275,22 +298,17 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< private applyTransitionToRoot( transition: ProvableStateTransition, root: Field, - merkleWitness: RollupMerkleTreeWitness, + merkleWitness: LinkedMerkleTreeWitness, index: number ): Field { - const membershipValid = merkleWitness.checkMembership( + const treeWrite = this.transitionToTreeWrite(transition, merkleWitness); + + return LinkedMerkleTreeCircuitOps.applyTreeWrite( root, - transition.path, - transition.from.value + merkleWitness, + treeWrite, + index ); - - membershipValid - .or(transition.from.isSome.not()) - .assertTrue(errors.merkleWitnessNotCorrect(index)); - - const newRoot = merkleWitness.calculateRoot(transition.to.value); - - return Provable.if(transition.to.isSome, newRoot, root); } /** @@ -377,6 +395,7 @@ export class StateTransitionProverProgrammable extends ZkProgrammable< proof1.publicInput.root, errors.propertyNotMatching("root", "publicInput.from -> proof1.from") ); + proof1.publicOutput.root.assertEquals( proof2.publicInput.root, errors.propertyNotMatching("root", "proof1.to -> proof2.from") diff --git a/packages/protocol/src/settlement/contracts/BridgeContract.ts 
b/packages/protocol/src/settlement/contracts/BridgeContract.ts index 55048889a..eb2d7a94b 100644 --- a/packages/protocol/src/settlement/contracts/BridgeContract.ts +++ b/packages/protocol/src/settlement/contracts/BridgeContract.ts @@ -113,6 +113,8 @@ export abstract class BridgeContractBase extends TokenContractV2 { } public async updateStateRootBase(root: Field) { + // It's fine for us to only store the actual root since we only have to + // witness values, not update/insert this.stateRoot.set(root); const settlementContractAddress = diff --git a/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts b/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts index 675c4ce72..f1cb1db8b 100644 --- a/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts +++ b/packages/protocol/src/settlement/contracts/SettlementSmartContract.ts @@ -1,9 +1,9 @@ import { prefixToField, - RollupMerkleTree, TypedClass, mapSequential, ChildVerificationKeyService, + LinkedMerkleTree, } from "@proto-kit/common"; import { AccountUpdate, @@ -256,7 +256,7 @@ export abstract class SettlementSmartContractBase extends TokenContractV2 { this.dispatchContractAddressX.getAndRequireEquals().assertEquals(Field(0)); this.sequencerKey.set(sequencer.x); - this.stateRoot.set(Field(RollupMerkleTree.EMPTY_ROOT)); + this.stateRoot.set(LinkedMerkleTree.EMPTY_ROOT); this.blockHashRoot.set(Field(BlockHashMerkleTree.EMPTY_ROOT)); this.networkStateHash.set(NetworkState.empty().hash()); this.dispatchContractAddressX.set(dispatchContract.x); diff --git a/packages/protocol/src/settlement/messages/OutgoingMessageArgument.ts b/packages/protocol/src/settlement/messages/OutgoingMessageArgument.ts index f7cf9ab2b..7a6a79995 100644 --- a/packages/protocol/src/settlement/messages/OutgoingMessageArgument.ts +++ b/packages/protocol/src/settlement/messages/OutgoingMessageArgument.ts @@ -1,17 +1,20 @@ import { Bool, Provable, Struct } from "o1js"; -import { RollupMerkleTreeWitness } 
from "@proto-kit/common"; +import { + LinkedMerkleTree, + LinkedMerkleTreeReadWitness, +} from "@proto-kit/common"; import { Withdrawal } from "./Withdrawal"; export const OUTGOING_MESSAGE_BATCH_SIZE = 1; export class OutgoingMessageArgument extends Struct({ - witness: RollupMerkleTreeWitness, + witness: LinkedMerkleTreeReadWitness, value: Withdrawal, }) { public static dummy(): OutgoingMessageArgument { return new OutgoingMessageArgument({ - witness: RollupMerkleTreeWitness.dummy(), + witness: LinkedMerkleTree.dummyReadWitness(), value: Withdrawal.dummy(), }); } diff --git a/packages/protocol/test/StateTransition.test.ts b/packages/protocol/test/StateTransition.test.ts index 936d2d85d..8d764b751 100644 --- a/packages/protocol/test/StateTransition.test.ts +++ b/packages/protocol/test/StateTransition.test.ts @@ -3,7 +3,7 @@ import { InMemoryMerkleTreeStorage } from "@proto-kit/common"; import { Bool, Field } from "o1js"; import { Option, ProvableStateTransition } from "../src/index"; -import { RollupMerkleTree } from "../../common/src/trees/RollupMerkleTree.js"; +import { RollupMerkleTree } from "../../common/src/trees/sparse/RollupMerkleTree.js"; // TODO Not worth fixing rn because we will revamp the STProver very soon diff --git a/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts b/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts index 930a2e49e..43cf92935 100644 --- a/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts +++ b/packages/protocol/test/prover/statetransition/StateTransitionProver.test.ts @@ -1,10 +1,10 @@ import { InMemoryAreProofsEnabled } from "@proto-kit/sdk"; import { Bool, Field } from "o1js"; import { + InMemoryLinkedLeafStore, InMemoryMerkleTreeStorage, + LinkedMerkleTree, padArray, - RollupMerkleTree, - RollupMerkleTreeWitness, } from "@proto-kit/common"; import { @@ -74,8 +74,13 @@ describe("StateTransitionProver", () => { }, ]); - const tree = new 
RollupMerkleTree(new InMemoryMerkleTreeStorage()); - const witness = tree.getWitness(1n); + const tree = new LinkedMerkleTree( + new InMemoryMerkleTreeStorage(), + new InMemoryLinkedLeafStore() + ); + const witness = LinkedMerkleTree.WITNESS.fromReadWitness( + tree.getReadWitness(1n) + ); const result = await prover.proveBatch( { @@ -87,7 +92,7 @@ describe("StateTransitionProver", () => { batch[0], { witnesses: padArray([witness], 4, () => - RollupMerkleTreeWitness.dummy() + LinkedMerkleTree.dummyWitness() ), }, new AppliedStateTransitionBatchState({ @@ -116,17 +121,17 @@ describe("StateTransitionProver", () => { const prove = async () => await prover.proveBatch( { - root: Field(RollupMerkleTree.EMPTY_ROOT), + root: Field(LinkedMerkleTree.EMPTY_ROOT), witnessedRootsHash: Field(0), batchesHash: Field(0), currentBatchStateHash: Field(0), }, batch[0], { - witnesses: padArray([], 4, RollupMerkleTreeWitness.dummy), + witnesses: padArray([], 4, () => LinkedMerkleTree.dummyWitness()), }, new AppliedStateTransitionBatchState({ - root: Field(RollupMerkleTree.EMPTY_ROOT), + root: Field(LinkedMerkleTree.EMPTY_ROOT), batchHash: Field(0), }) ); @@ -151,13 +156,20 @@ describe("StateTransitionProver", () => { }, ]); - const tree = new RollupMerkleTree(new InMemoryMerkleTreeStorage()); + const tree = new LinkedMerkleTree( + new InMemoryMerkleTreeStorage(), + new InMemoryLinkedLeafStore() + ); const inputRoot = tree.getRoot(); - const witness = tree.getWitness(1n); - tree.setLeaf(1n, Field(2)); - const witness2 = tree.getWitness(2n); + const witness = LinkedMerkleTree.WITNESS.fromReadWitness( + tree.getReadWitness(1n) + ); + tree.setLeaf(1n, 2n); + const witness2 = LinkedMerkleTree.WITNESS.fromReadWitness( + tree.getReadWitness(2n) + ); const prove = async () => await prover.proveBatch( @@ -171,9 +183,9 @@ describe("StateTransitionProver", () => { { witnesses: [ witness, - RollupMerkleTreeWitness.dummy(), + LinkedMerkleTree.dummyWitness(), witness2, - 
RollupMerkleTreeWitness.dummy(), + LinkedMerkleTree.dummyWitness(), ], }, new AppliedStateTransitionBatchState({ @@ -209,31 +221,40 @@ describe("StateTransitionProver", () => { }, ]); - const tree = new RollupMerkleTree(new InMemoryMerkleTreeStorage()); + const tree = new LinkedMerkleTree( + new InMemoryMerkleTreeStorage(), + new InMemoryLinkedLeafStore() + ); + + const witness1 = tree.setLeaf(1n, 2n); + const witness2 = tree.setLeaf(2n, 3n); + + const resultRoot = tree.getRoot(); - const witness1 = tree.getWitness(1n); - tree.setLeaf(1n, Field(2)); - const witness2 = tree.getWitness(2n); - tree.setLeaf(2n, Field(3)); + const witness3 = tree.setLeaf(2n, 4n); + const witness4 = tree.setLeaf(2n, 5n); const result = await prover.proveBatch( { - root: Field(RollupMerkleTree.EMPTY_ROOT), + root: Field(LinkedMerkleTree.EMPTY_ROOT), witnessedRootsHash: Field(0), batchesHash: Field(0), currentBatchStateHash: Field(0), }, batch[0], { - witnesses: [witness1, witness2, witness2, witness2], + witnesses: [witness1, witness2, witness3, witness4], + // .map((x) => + // LinkedMerkleTree.WITNESS.fromReadWitness(x) + // ), }, new AppliedStateTransitionBatchState({ - root: Field(RollupMerkleTree.EMPTY_ROOT), + root: Field(LinkedMerkleTree.EMPTY_ROOT), batchHash: Field(0), }) ); - expect(result.root.toString()).toStrictEqual(tree.getRoot().toString()); + expect(result.root.toString()).toStrictEqual(resultRoot.toString()); expect(result.currentBatchStateHash.toString()).toStrictEqual("0"); }); }); diff --git a/packages/sdk/src/graphql/GraphqlQueryTransportModule.ts b/packages/sdk/src/graphql/GraphqlQueryTransportModule.ts index 7a28965e8..a45a3871c 100644 --- a/packages/sdk/src/graphql/GraphqlQueryTransportModule.ts +++ b/packages/sdk/src/graphql/GraphqlQueryTransportModule.ts @@ -2,7 +2,7 @@ import { QueryTransportModule } from "@proto-kit/sequencer"; import { Field } from "o1js"; import { inject, injectable } from "tsyringe"; import { gql } from "@urql/core"; -import { 
RollupMerkleTreeWitness } from "@proto-kit/common"; +import { LinkedMerkleTreeReadWitness } from "@proto-kit/common"; import { AppChainModule } from "../appChain/AppChainModule"; @@ -64,12 +64,19 @@ export class GraphqlQueryTransportModule public async merkleWitness( key: Field - ): Promise { + ): Promise { const query = gql` query Witness($path: String!) { witness(path: $path) { - siblings - isLefts + leaf { + value + path + nextPath + } + merkleWitness { + siblings + isLefts + } } } `; @@ -87,21 +94,26 @@ export class GraphqlQueryTransportModule } if ( - witnessJson.siblings === undefined || - witnessJson.isLefts === undefined + witnessJson.leaf === undefined || + witnessJson.merkleWitness.siblings === undefined || + witnessJson.merkleWitness.isLefts === undefined ) { throw new Error("Witness json object malformed"); } - assertStringArray(witnessJson.siblings); - assertBooleanArray(witnessJson.isLefts); + assertStringArray(witnessJson.merkleWitness.siblings); + assertBooleanArray(witnessJson.merkleWitness.isLefts); - return new RollupMerkleTreeWitness( - RollupMerkleTreeWitness.fromJSON({ + return new LinkedMerkleTreeReadWitness( + LinkedMerkleTreeReadWitness.fromJSON({ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - path: witnessJson.siblings, - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - isLeft: witnessJson.isLefts, + leaf: witnessJson.leaf, + merkleWitness: { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + path: witnessJson.merkleWitness.siblings, + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + isLeft: witnessJson.merkleWitness.isLefts, + }, }) ); } diff --git a/packages/sdk/src/query/StateServiceQueryModule.ts b/packages/sdk/src/query/StateServiceQueryModule.ts index ec78f44aa..d36a69593 100644 --- a/packages/sdk/src/query/StateServiceQueryModule.ts +++ b/packages/sdk/src/query/StateServiceQueryModule.ts @@ -1,14 +1,17 @@ import { AsyncStateService, - 
CachedMerkleTreeStore, QueryTransportModule, Sequencer, SequencerModulesRecord, - AsyncMerkleTreeStore, + CachedLinkedLeafStore, + AsyncLinkedLeafStore, } from "@proto-kit/sequencer"; import { Field } from "o1js"; import { inject, injectable } from "tsyringe"; -import { RollupMerkleTree, RollupMerkleTreeWitness } from "@proto-kit/common"; +import { + LinkedMerkleTreeReadWitness, + LinkedMerkleTree, +} from "@proto-kit/common"; import { AppChainModule } from "../appChain/AppChainModule"; @@ -29,8 +32,8 @@ export class StateServiceQueryModule ); } - public get treeStore(): AsyncMerkleTreeStore { - return this.sequencer.dependencyContainer.resolve("AsyncMerkleStore"); + public get treeStore(): AsyncLinkedLeafStore { + return this.sequencer.dependencyContainer.resolve("AsyncLinkedMerkleStore"); } public get(key: Field) { @@ -39,12 +42,12 @@ export class StateServiceQueryModule public async merkleWitness( path: Field - ): Promise { - const syncStore = new CachedMerkleTreeStore(this.treeStore); + ): Promise { + const syncStore = await CachedLinkedLeafStore.new(this.treeStore); await syncStore.preloadKey(path.toBigInt()); - const tree = new RollupMerkleTree(syncStore); + const tree = new LinkedMerkleTree(syncStore.treeStore, syncStore); - return tree.getWitness(path.toBigInt()); + return tree.getReadWitness(path.toBigInt()); } } diff --git a/packages/sequencer/src/helpers/query/QueryBuilderFactory.ts b/packages/sequencer/src/helpers/query/QueryBuilderFactory.ts index eb69ca104..08f65b7c7 100644 --- a/packages/sequencer/src/helpers/query/QueryBuilderFactory.ts +++ b/packages/sequencer/src/helpers/query/QueryBuilderFactory.ts @@ -1,4 +1,4 @@ -import { TypedClass, RollupMerkleTreeWitness } from "@proto-kit/common"; +import { LinkedMerkleTreeReadWitness, TypedClass } from "@proto-kit/common"; import { Runtime, RuntimeModule, @@ -24,13 +24,13 @@ export type PickByType = { export interface QueryGetterState { get: () => Promise; path: () => string; - merkleWitness: () => 
Promise; + merkleWitness: () => Promise; } export interface QueryGetterStateMap { get: (key: Key) => Promise; path: (key: Key) => string; - merkleWitness: (key: Key) => Promise; + merkleWitness: (key: Key) => Promise; } export type PickStateProperties = PickByType>; diff --git a/packages/sequencer/src/helpers/query/QueryTransportModule.ts b/packages/sequencer/src/helpers/query/QueryTransportModule.ts index eae3290c3..ce36eeec5 100644 --- a/packages/sequencer/src/helpers/query/QueryTransportModule.ts +++ b/packages/sequencer/src/helpers/query/QueryTransportModule.ts @@ -1,7 +1,9 @@ import { Field } from "o1js"; -import { RollupMerkleTreeWitness } from "@proto-kit/common"; +import { LinkedMerkleTreeReadWitness } from "@proto-kit/common"; export interface QueryTransportModule { get: (key: Field) => Promise; - merkleWitness: (key: Field) => Promise; + merkleWitness: ( + key: Field + ) => Promise; } diff --git a/packages/sequencer/src/index.ts b/packages/sequencer/src/index.ts index 476da4de4..4605b4f53 100644 --- a/packages/sequencer/src/index.ts +++ b/packages/sequencer/src/index.ts @@ -86,10 +86,13 @@ export * from "./helpers/query/NetworkStateTransportModule"; export * from "./state/prefilled/PreFilledStateService"; export * from "./state/async/AsyncMerkleTreeStore"; export * from "./state/async/AsyncStateService"; +export * from "./state/async/AsyncLinkedLeafStore"; export * from "./state/merkle/CachedMerkleTreeStore"; export * from "./state/merkle/SyncCachedMerkleTreeStore"; export * from "./state/state/DummyStateService"; export * from "./state/state/CachedStateService"; +export * from "./state/lmt/AsyncLinkedMerkleTreeDatabase"; +export * from "./state/lmt/CachedLinkedLeafStore"; export * from "./settlement/SettlementModule"; export * from "./settlement/messages/WithdrawalQueue"; export * from "./settlement/messages/IncomingMessageAdapter"; diff --git a/packages/sequencer/src/protocol/production/BatchProducerModule.ts 
b/packages/sequencer/src/protocol/production/BatchProducerModule.ts index fcfffa5b5..545190624 100644 --- a/packages/sequencer/src/protocol/production/BatchProducerModule.ts +++ b/packages/sequencer/src/protocol/production/BatchProducerModule.ts @@ -13,10 +13,10 @@ import { } from "../../sequencer/builder/SequencerModule"; import { BatchStorage } from "../../storage/repositories/BatchStorage"; import { SettleableBatch } from "../../storage/model/Batch"; -import { CachedMerkleTreeStore } from "../../state/merkle/CachedMerkleTreeStore"; -import { AsyncMerkleTreeStore } from "../../state/async/AsyncMerkleTreeStore"; import { BlockWithResult } from "../../storage/model/Block"; import type { Database } from "../../storage/Database"; +import { AsyncLinkedLeafStore } from "../../state/async/AsyncLinkedLeafStore"; +import { CachedLinkedLeafStore } from "../../state/lmt/CachedLinkedLeafStore"; import { BlockProofSerializer } from "./tasks/serializers/BlockProofSerializer"; import { BatchTracingService } from "./tracing/BatchTracingService"; @@ -26,7 +26,7 @@ export type StateRecord = Record; interface BatchMetadata { batch: SettleableBatch; - changes: CachedMerkleTreeStore; + changes: CachedLinkedLeafStore; } const errors = { @@ -47,8 +47,8 @@ export class BatchProducerModule extends SequencerModule { private productionInProgress = false; public constructor( - @inject("AsyncMerkleStore") - private readonly merkleStore: AsyncMerkleTreeStore, + @inject("AsyncLinkedLeafStore") + private readonly merkleStore: AsyncLinkedLeafStore, @inject("BatchStorage") private readonly batchStorage: BatchStorage, @inject("Database") private readonly database: Database, @@ -178,7 +178,7 @@ export class BatchProducerModule extends SequencerModule { batchId: number ): Promise<{ proof: Proof; - changes: CachedMerkleTreeStore; + changes: CachedLinkedLeafStore; fromNetworkState: NetworkState; toNetworkState: NetworkState; }> { @@ -186,7 +186,7 @@ export class BatchProducerModule extends 
SequencerModule { throw errors.blockWithoutTxs(); } - const merkleTreeStore = new CachedMerkleTreeStore(this.merkleStore); + const merkleTreeStore = await CachedLinkedLeafStore.new(this.merkleStore); const trace = await this.batchTraceService.traceBatch( blocks.map((block) => block), diff --git a/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts b/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts index a85fbcf8e..12722e3b4 100644 --- a/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts +++ b/packages/sequencer/src/protocol/production/sequencing/BlockProducerModule.ts @@ -26,6 +26,7 @@ import { Database } from "../../../storage/Database"; import { IncomingMessagesService } from "../../../settlement/messages/IncomingMessagesService"; import { Tracer } from "../../../logging/Tracer"; import { trace } from "../../../logging/trace"; +import { AsyncLinkedLeafStore } from "../../../state/async/AsyncLinkedLeafStore"; import { BlockProductionService } from "./BlockProductionService"; import { BlockResultService } from "./BlockResultService"; @@ -45,8 +46,8 @@ export class BlockProducerModule extends SequencerModule { private readonly messageService: IncomingMessagesService | undefined, @inject("UnprovenStateService") private readonly unprovenStateService: AsyncStateService, - @inject("UnprovenMerkleStore") - private readonly unprovenMerkleStore: AsyncMerkleTreeStore, + @inject("UnprovenLinkedLeafStore") + private readonly unprovenLinkedLeafStore: AsyncLinkedLeafStore, @inject("BlockQueue") private readonly blockQueue: BlockQueue, @inject("BlockTreeStore") @@ -118,7 +119,7 @@ export class BlockProducerModule extends SequencerModule { const { result, blockHashTreeStore, treeStore, stateService } = await this.resultService.generateMetadataForNextBlock( block, - this.unprovenMerkleStore, + this.unprovenLinkedLeafStore, this.blockTreeStore, this.unprovenStateService ); diff --git 
a/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts b/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts index 7e5803e7e..969a8e838 100644 --- a/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts +++ b/packages/sequencer/src/protocol/production/sequencing/BlockResultService.ts @@ -1,5 +1,5 @@ import { Bool, Field, Poseidon } from "o1js"; -import { RollupMerkleTree } from "@proto-kit/common"; +import { LinkedMerkleTree } from "@proto-kit/common"; import { AfterBlockHookArguments, BlockHashMerkleTree, @@ -27,10 +27,13 @@ import { AsyncStateService } from "../../../state/async/AsyncStateService"; import type { StateRecord } from "../BatchProducerModule"; import { trace } from "../../../logging/trace"; import { Tracer } from "../../../logging/Tracer"; +import { AsyncLinkedLeafStore } from "../../../state/async/AsyncLinkedLeafStore"; +import { CachedLinkedLeafStore } from "../../../state/lmt/CachedLinkedLeafStore"; import { executeWithExecutionContext } from "./TransactionExecutionService"; -function collectStateDiff( +// This is ordered, because javascript maintains the order based on time of first insertion +function collectOrderedStateDiff( stateTransitions: UntypedStateTransition[] ): StateRecord { return stateTransitions.reduce>( @@ -44,7 +47,7 @@ function collectStateDiff( ); } -function createCombinedStateDiff( +function createCombinedOrderedStateDiff( transactions: TransactionExecutionResult[], blockHookSTs: UntypedStateTransition[] ) { @@ -57,7 +60,7 @@ function createCombinedStateDiff( transitions.push(...blockHookSTs); - return collectStateDiff(transitions); + return collectOrderedStateDiff(transitions); }) .reduce((accumulator, diff) => { // accumulator properties will be overwritten by diff's values @@ -153,22 +156,23 @@ export class BlockResultService { } public async applyStateDiff( - store: CachedMerkleTreeStore, + store: CachedLinkedLeafStore, stateDiff: StateRecord - ): Promise 
{ - await store.preloadKeys(Object.keys(stateDiff).map(BigInt)); + ): Promise { + const stateKeys = Object.keys(stateDiff); + await store.preloadKeys(stateKeys.map(BigInt)); // In case the diff is empty, we preload key 0 in order to // retrieve the root, which we need later - if (Object.keys(stateDiff).length === 0) { + if (stateKeys.length === 0) { await store.preloadKey(0n); } - const tree = new RollupMerkleTree(store); + const tree = new LinkedMerkleTree(store.treeStore, store); Object.entries(stateDiff).forEach(([key, state]) => { const treeValue = state !== undefined ? Poseidon.hash(state) : Field(0); - tree.setLeaf(BigInt(key), treeValue); + tree.setLeaf(BigInt(key), treeValue.toBigInt()); }); return tree; @@ -179,21 +183,21 @@ export class BlockResultService { })) public async generateMetadataForNextBlock( block: Block, - merkleTreeStore: AsyncMerkleTreeStore, + merkleTreeStore: AsyncLinkedLeafStore, blockHashTreeStore: AsyncMerkleTreeStore, stateService: AsyncStateService ): Promise<{ result: BlockResult; - treeStore: CachedMerkleTreeStore; + treeStore: CachedLinkedLeafStore; blockHashTreeStore: CachedMerkleTreeStore; stateService: CachedStateService; }> { - const combinedDiff = createCombinedStateDiff( + const combinedDiff = createCombinedOrderedStateDiff( block.transactions, block.beforeBlockStateTransitions ); - const inMemoryStore = new CachedMerkleTreeStore(merkleTreeStore); + const inMemoryStore = await CachedLinkedLeafStore.new(merkleTreeStore); const tree = await this.applyStateDiff(inMemoryStore, combinedDiff); @@ -220,7 +224,7 @@ export class BlockResultService { // Apply afterBlock STs to the tree const tree2 = await this.applyStateDiff( inMemoryStore, - collectStateDiff( + collectOrderedStateDiff( stateTransitions.map((stateTransition) => UntypedStateTransition.fromStateTransition(stateTransition) ) diff --git a/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts 
b/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts index 03303d3fa..b7281c649 100644 --- a/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts +++ b/packages/sequencer/src/protocol/production/tasks/StateTransitionTask.ts @@ -15,7 +15,7 @@ import { log, ProvableMethodExecutionContext, CompileRegistry, - RollupMerkleTreeWitness, + LinkedMerkleTreeWitness, } from "@proto-kit/common"; import { Task, TaskSerializer } from "../../../worker/flow/Task"; @@ -28,7 +28,7 @@ export interface StateTransitionProofParameters { publicInput: StateTransitionProverPublicInput; batch: StateTransitionProvableBatch; batchState: AppliedStateTransitionBatchState; - merkleWitnesses: RollupMerkleTreeWitness[]; + merkleWitnesses: LinkedMerkleTreeWitness[]; } @injectable() diff --git a/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts b/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts index dd7966730..f88c80132 100644 --- a/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts +++ b/packages/sequencer/src/protocol/production/tasks/serializers/StateTransitionParametersSerializer.ts @@ -3,7 +3,7 @@ import { StateTransitionProvableBatch, StateTransitionProverPublicInput, } from "@proto-kit/protocol"; -import { RollupMerkleTreeWitness } from "@proto-kit/common"; +import { LinkedMerkleTreeWitness } from "@proto-kit/common"; import { TaskSerializer } from "../../../../worker/flow/Task"; import type { StateTransitionProofParameters } from "../StateTransitionTask"; @@ -11,7 +11,7 @@ import type { StateTransitionProofParameters } from "../StateTransitionTask"; interface StateTransitionParametersJSON { publicInput: ReturnType; batch: ReturnType; - merkleWitnesses: ReturnType[]; + merkleWitnesses: ReturnType[]; batchState: ReturnType; } @@ -27,7 +27,7 @@ export class StateTransitionParametersSerializer batch: 
StateTransitionProvableBatch.toJSON(parameters.batch), merkleWitnesses: parameters.merkleWitnesses.map((witness) => - RollupMerkleTreeWitness.toJSON(witness) + LinkedMerkleTreeWitness.toJSON(witness) ), batchState: AppliedStateTransitionBatchState.toJSON( @@ -49,7 +49,7 @@ export class StateTransitionParametersSerializer merkleWitnesses: parsed.merkleWitnesses.map( (witness) => - new RollupMerkleTreeWitness(RollupMerkleTreeWitness.fromJSON(witness)) + new LinkedMerkleTreeWitness(LinkedMerkleTreeWitness.fromJSON(witness)) ), batchState: new AppliedStateTransitionBatchState( diff --git a/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts b/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts index bf4f3877b..f54d52e6f 100644 --- a/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts +++ b/packages/sequencer/src/protocol/production/tracing/BatchTracingService.ts @@ -1,18 +1,17 @@ -import { yieldSequential } from "@proto-kit/common"; +import { log, yieldSequential } from "@proto-kit/common"; import { AppliedBatchHashList, MinaActionsHashList, TransactionHashList, WitnessedRootHashList, } from "@proto-kit/protocol"; -import { Field } from "o1js"; import { inject, injectable } from "tsyringe"; -import { CachedMerkleTreeStore } from "../../../state/merkle/CachedMerkleTreeStore"; import { StateTransitionProofParameters } from "../tasks/StateTransitionTask"; import { BlockWithResult } from "../../../storage/model/Block"; import { trace } from "../../../logging/trace"; import { Tracer } from "../../../logging/Tracer"; +import { CachedLinkedLeafStore } from "../../../state/lmt/CachedLinkedLeafStore"; import { BlockTrace, @@ -41,7 +40,7 @@ export class BatchTracingService { return { pendingSTBatches: new AppliedBatchHashList(), witnessedRoots: new WitnessedRootHashList(), - stateRoot: Field(block.block.fromStateRoot), + stateRoot: block.block.fromStateRoot, eternalTransactionsList: new TransactionHashList( 
block.block.fromEternalTransactionsHash ), @@ -52,6 +51,8 @@ export class BatchTracingService { @trace("batch.trace.blocks") public async traceBlocks(blocks: BlockWithResult[]) { + log.debug(`Tracing ${blocks.length} blocks...`); + const batchState = this.createBatchState(blocks[0]); // Trace blocks @@ -80,7 +81,7 @@ export class BatchTracingService { @trace("batch.trace.transitions") public async traceStateTransitions( blocks: BlockWithResult[], - merkleTreeStore: CachedMerkleTreeStore + merkleTreeStore: CachedLinkedLeafStore ) { const batches = await this.tracer.trace( "batch.trace.transitions.encoding", @@ -96,7 +97,7 @@ export class BatchTracingService { @trace("batch.trace", ([, , batchId]) => ({ batchId })) public async traceBatch( blocks: BlockWithResult[], - merkleTreeStore: CachedMerkleTreeStore, + merkleTreeStore: CachedLinkedLeafStore, // Only for trace metadata batchId: number ): Promise { diff --git a/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts b/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts index 3c00f69b2..4f0d2194b 100644 --- a/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts +++ b/packages/sequencer/src/protocol/production/tracing/StateTransitionTracingService.ts @@ -1,5 +1,9 @@ import { Bool, Field } from "o1js"; -import { mapSequential, RollupMerkleTree } from "@proto-kit/common"; +import { + LinkedMerkleTree, + LinkedMerkleTreeWitness, + mapSequential, +} from "@proto-kit/common"; import { inject, injectable } from "tsyringe"; import { AppliedBatchHashList, @@ -15,11 +19,11 @@ import { import { distinctByString } from "../../../helpers/utils"; import { BlockWithResult } from "../../../storage/model/Block"; import { UntypedStateTransition } from "../helpers/UntypedStateTransition"; -import { CachedMerkleTreeStore } from "../../../state/merkle/CachedMerkleTreeStore"; import { StateTransitionProofParameters } from 
"../tasks/StateTransitionTask"; -import { SyncCachedMerkleTreeStore } from "../../../state/merkle/SyncCachedMerkleTreeStore"; import { trace } from "../../../logging/trace"; import { Tracer } from "../../../logging/Tracer"; +import { CachedLinkedLeafStore } from "../../../state/lmt/CachedLinkedLeafStore"; +import { SyncCachedLinkedLeafStore } from "../../../state/merkle/SyncCachedLinkedLeafStore"; export interface TracingStateTransitionBatch { stateTransitions: UntypedStateTransition[]; @@ -67,7 +71,7 @@ export class StateTransitionTracingService { @trace("batch.trace.transitions.merkle_trace") public async createMerkleTrace( - merkleStore: CachedMerkleTreeStore, + merkleStore: CachedLinkedLeafStore, stateTransitions: TracingStateTransitionBatch[] ) { const batches = StateTransitionProvableBatch.fromBatches( @@ -90,7 +94,7 @@ export class StateTransitionTracingService { } public async traceTransitions( - merkleStore: CachedMerkleTreeStore, + merkleStore: CachedLinkedLeafStore, batches: StateTransitionProvableBatch[] ): Promise { const keys = this.allKeys( @@ -101,9 +105,12 @@ export class StateTransitionTracingService { await merkleStore.preloadKeys(keys.map((key) => key.toBigInt())); - let batchMerkleStore = new SyncCachedMerkleTreeStore(merkleStore); + let batchMerkleStore = new SyncCachedLinkedLeafStore(merkleStore); - let tree = new RollupMerkleTree(batchMerkleStore); + let tree = new LinkedMerkleTree( + batchMerkleStore.treeStore, + batchMerkleStore + ); const initialRoot = tree.getRoot(); const batchList = new AppliedBatchHashList(Field(0)); @@ -135,17 +142,23 @@ export class StateTransitionTracingService { async (transitionInfo) => { const { stateTransition, type, witnessRoot } = transitionInfo; - const merkleWitness = tree.getWitness( - stateTransition.path.toBigInt() - ); + // const merkleWitness = tree.getWitness( + // stateTransition.path.toBigInt() + // ); + + let witness: LinkedMerkleTreeWitness; if (stateTransition.to.isSome.toBoolean()) { - 
tree.setLeaf( + witness = tree.setLeaf( stateTransition.path.toBigInt(), - stateTransition.to.value + stateTransition.to.value.toBigInt() ); danglingStateRoot = tree.getRoot(); + } else { + witness = LinkedMerkleTreeWitness.fromReadWitness( + tree.getReadWitness(stateTransition.path.toBigInt()) + ); } currentSTList.push(stateTransition); @@ -173,8 +186,11 @@ export class StateTransitionTracingService { danglingStateRoot = finalizedStateRoot; - batchMerkleStore = new SyncCachedMerkleTreeStore(merkleStore); - tree = new RollupMerkleTree(batchMerkleStore); + batchMerkleStore = new SyncCachedLinkedLeafStore(merkleStore); + tree = new LinkedMerkleTree( + batchMerkleStore.treeStore, + batchMerkleStore + ); } else { throw new Error("Unreachable"); } @@ -197,7 +213,7 @@ export class StateTransitionTracingService { ); } - return [merkleWitness, witnessRoot] as const; + return [witness, witnessRoot] as const; } ); diff --git a/packages/sequencer/src/sequencer/executor/Sequencer.ts b/packages/sequencer/src/sequencer/executor/Sequencer.ts index 88e87b99e..bf41d9768 100644 --- a/packages/sequencer/src/sequencer/executor/Sequencer.ts +++ b/packages/sequencer/src/sequencer/executor/Sequencer.ts @@ -121,8 +121,10 @@ export class Sequencer await sequencerModule.start(); } - // TODO This currently also warns for client appchains - if (!moduleClassNames.includes("SequencerStartupModule")) { + if ( + !moduleClassNames.includes("SequencerStartupModule") && + moduleClassNames.includes("BatchProducerModule") + ) { log.warn("SequencerStartupModule is not defined."); } } diff --git a/packages/sequencer/src/settlement/BridgingModule.ts b/packages/sequencer/src/settlement/BridgingModule.ts index c4e407246..d67a839a4 100644 --- a/packages/sequencer/src/settlement/BridgingModule.ts +++ b/packages/sequencer/src/settlement/BridgingModule.ts @@ -29,9 +29,9 @@ import { import { AreProofsEnabled, filterNonUndefined, + LinkedMerkleTree, log, noop, - RollupMerkleTree, } from "@proto-kit/common"; 
import { match, Pattern } from "ts-pattern"; import { FungibleToken } from "mina-fungible-token"; @@ -40,10 +40,10 @@ import { SequencerModule, sequencerModule, } from "../sequencer/builder/SequencerModule"; -import { CachedMerkleTreeStore } from "../state/merkle/CachedMerkleTreeStore"; -import { AsyncMerkleTreeStore } from "../state/async/AsyncMerkleTreeStore"; import { FeeStrategy } from "../protocol/baselayer/fees/FeeStrategy"; import type { MinaBaseLayer } from "../protocol/baselayer/MinaBaseLayer"; +import { AsyncLinkedLeafStore } from "../state/async/AsyncLinkedLeafStore"; +import { CachedLinkedLeafStore } from "../state/lmt/CachedLinkedLeafStore"; import type { OutgoingMessageAdapter } from "./messages/WithdrawalQueue"; import type { SettlementModule } from "./SettlementModule"; @@ -75,8 +75,8 @@ export class BridgingModule extends SequencerModule { private readonly settlementModule: SettlementModule, @inject("OutgoingMessageQueue") private readonly outgoingMessageQueue: OutgoingMessageAdapter, - @inject("AsyncMerkleStore") - private readonly merkleTreeStore: AsyncMerkleTreeStore, + @inject("AsyncLinkedLeafStore") + private readonly linkedLeafStore: AsyncLinkedLeafStore, @inject("FeeStrategy") private readonly feeStrategy: FeeStrategy, @inject("AreProofsEnabled") areProofsEnabled: AreProofsEnabled, @@ -357,8 +357,8 @@ export class BridgingModule extends SequencerModule { const bridgeContract = this.createBridgeContract(bridgeAddress, tokenId); - const cachedStore = new CachedMerkleTreeStore(this.merkleTreeStore); - const tree = new RollupMerkleTree(cachedStore); + const cachedStore = await CachedLinkedLeafStore.new(this.linkedLeafStore); + const tree = new LinkedMerkleTree(cachedStore.treeStore, cachedStore); const [withdrawalModule, withdrawalStateName] = this.getBridgingModuleConfig().withdrawalStatePath.split("."); @@ -400,7 +400,7 @@ export class BridgingModule extends SequencerModule { await cachedStore.preloadKeys(keys.map((key) => key.toBigInt())); 
const transactionParamaters = batch.map((message, index) => { - const witness = tree.getWitness(keys[index].toBigInt()); + const witness = tree.getReadWitness(keys[index].toBigInt()); return new OutgoingMessageArgument({ witness, value: message.value, diff --git a/packages/sequencer/src/state/async/AsyncLinkedLeafStore.ts b/packages/sequencer/src/state/async/AsyncLinkedLeafStore.ts new file mode 100644 index 000000000..5d9dcc0fb --- /dev/null +++ b/packages/sequencer/src/state/async/AsyncLinkedLeafStore.ts @@ -0,0 +1,21 @@ +import { StoredLeaf } from "@proto-kit/common"; + +import { AsyncMerkleTreeStore } from "./AsyncMerkleTreeStore"; + +export interface AsyncLinkedLeafStore { + treeStore: AsyncMerkleTreeStore; + + openTransaction: () => Promise; + + commit: () => Promise; + + writeLeaves: (leaves: StoredLeaf[]) => void; + + getLeavesAsync: (paths: bigint[]) => Promise<(StoredLeaf | undefined)[]>; + + getMaximumIndexAsync: () => Promise; + + getPreviousLeavesAsync: ( + path: bigint[] + ) => Promise<(StoredLeaf | undefined)[]>; +} diff --git a/packages/sequencer/src/state/lmt/AsyncLinkedMerkleTreeDatabase.ts b/packages/sequencer/src/state/lmt/AsyncLinkedMerkleTreeDatabase.ts new file mode 100644 index 000000000..a8e2bd904 --- /dev/null +++ b/packages/sequencer/src/state/lmt/AsyncLinkedMerkleTreeDatabase.ts @@ -0,0 +1,7 @@ +import { AsyncMerkleTreeStore } from "../async/AsyncMerkleTreeStore"; +import { AsyncLinkedLeafStore } from "../async/AsyncLinkedLeafStore"; + +export interface AsyncLinkedMerkleTreeDatabase { + treeStore: AsyncMerkleTreeStore; + leafStore: AsyncLinkedLeafStore; +} diff --git a/packages/sequencer/src/state/lmt/CachedLinkedLeafStore.ts b/packages/sequencer/src/state/lmt/CachedLinkedLeafStore.ts new file mode 100644 index 000000000..b55c1b267 --- /dev/null +++ b/packages/sequencer/src/state/lmt/CachedLinkedLeafStore.ts @@ -0,0 +1,240 @@ +import { + InMemoryLinkedLeafStore, + LinkedLeaf, + LinkedLeafStore, + assertDefined, + StoredLeaf, + 
filterNonUndefined, +} from "@proto-kit/common"; +// eslint-disable-next-line import/no-extraneous-dependencies +import zip from "lodash/zip"; +// eslint-disable-next-line import/no-extraneous-dependencies +import groupBy from "lodash/groupBy"; + +import { AsyncLinkedLeafStore } from "../async/AsyncLinkedLeafStore"; +import { CachedMerkleTreeStore } from "../merkle/CachedMerkleTreeStore"; + +export class CachedLinkedLeafStore implements LinkedLeafStore { + private writeCache: { + [key: string]: StoredLeaf; + } = {}; + + private readonly leafStore = new InMemoryLinkedLeafStore(); + + private readonly treeCache: CachedMerkleTreeStore; + + private constructor(private readonly parent: AsyncLinkedLeafStore) { + this.treeCache = new CachedMerkleTreeStore(parent.treeStore); + } + + public get treeStore() { + return this.treeCache; + } + + public static async new( + parent: AsyncLinkedLeafStore + ): Promise { + const cachedInstance = new CachedLinkedLeafStore(parent); + await cachedInstance.preloadMaximumIndex(); + await cachedInstance.preloadZeroNode(); + return cachedInstance; + } + + public getLeaf(path: bigint) { + return this.leafStore.getLeaf(path); + } + + // This gets the leaves and the nodes from the in memory store. + // If the leaf is not in the in-memory store it goes to the parent (i.e. + // what's put in the constructor). + public async getLeavesAsync(paths: bigint[]) { + return await this.retrieveBatched( + paths, + (path) => this.getLeaf(path), + (remotePaths) => this.parent.getLeavesAsync(remotePaths) + ); + } + + public setLeaf(index: bigint, leaf: LinkedLeaf) { + this.writeCache[leaf.path.toString()] = { leaf: leaf, index: index }; + this.leafStore.setLeaf(index, leaf); + } + + // This is just used in the mergeIntoParent + public writeLeaves(leaves: StoredLeaf[]) { + leaves.forEach(({ leaf, index }) => { + this.setLeaf(index, leaf); + }); + } + + // This gets the leaves from the cache. 
+ // Only used in mergeIntoParent + public getWrittenLeaves(): StoredLeaf[] { + return Object.values(this.writeCache); + } + + // This resets the cache (not the in memory tree). + public resetWrittenLeaves() { + this.writeCache = {}; + } + + protected async preloadZeroNode() { + if (this.leafStore.getLeaf(0n) === undefined) { + await this.preloadKey(0n); + } + } + + private async preloadMaximumIndex() { + // Preload maximumIndex before all others, to have a accurate index loaded already + // before setting a ny other leaves + if (this.leafStore.getMaximumIndex() === undefined) { + this.leafStore.maximumIndex = await this.parent.getMaximumIndexAsync(); + } + } + + async retrieveBatched( + inputs: Input[], + cache: (input: Input) => Element | undefined, + parent: (inputs: Input[]) => Promise<(Element | undefined)[]> + ) { + // The reason I built it using this weird closure-centric algorithm is that doing it + // purely functional would require a lot more array operations than this + const results: (Element | undefined)[] = Array.from({ + length: inputs.length, + }); + + const toFetchRemotely = inputs + .map((input, i) => { + const localResult = cache(input); + if (localResult !== undefined) { + results[i] = localResult; + return undefined; + } else { + return { path: input, index: i }; + } + }) + .filter(filterNonUndefined); + + let remoteResults: (Element | undefined)[] = []; + if (toFetchRemotely.length > 0) { + remoteResults = await parent(toFetchRemotely.map((value) => value.path)); + } + + zip(toFetchRemotely, remoteResults).forEach(([query, result]) => { + assertDefined(query); + + results[query.index] = result; + }); + + return results; + } + + // Takes a list of paths and for each key collects the relevant nodes from the + // parent tree and sets the leaf and node in the cached tree (and in-memory tree). 
+ public async preloadKeysInternal(paths: bigint[]): Promise { + const leaves = await this.getLeavesAsync(paths); + + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + const zipped = zip(paths, leaves) as [bigint, StoredLeaf | undefined][]; + const groupedOps = groupBy(zipped, ([, leaf]) => + leaf !== undefined ? "update" : "insert" + ); + + const treeIndizesToFetch: bigint[] = []; + + if (groupedOps.update !== undefined) { + // Preload updates + const treeUpdates = groupedOps.update.map(([path, leaf]) => { + assertDefined(leaf); + + // Update case, this leaf is the only one we need + this.setLeaf(leaf.index, leaf.leaf); + + return leaf.index; + }); + treeIndizesToFetch.push(...treeUpdates); + } + + if (groupedOps.insert !== undefined) { + // Insert case, this leaf doesn't yet exist - we need to fetch the previous one + // Calling getLeafLessOrEqual assures that it is actually the leaf we want + // (i.e. pointing over our path) + const previousLeaves = await this.retrieveBatched( + groupedOps.insert.map(([path]) => path), + this.leafStore.getPreviousLeaf.bind(this.leafStore), + this.parent.getPreviousLeavesAsync.bind(this.parent) + ); + + // This is a check that all previous leaves have been found, with the + // one exception being when the tree is empty (see below) + const anyUndefined = + previousLeaves.findIndex((x) => x === undefined) > -1; + // eslint-disable-next-line sonarjs/no-collapsible-if + if (anyUndefined) { + // This only happens when the store is empty, because in this case, the tree + // initializes the 0-leaf, but this only happens after preloading. 
+ if (this.leafStore.getLeaf(0n) === undefined) { + const [zeroLeaf] = await this.parent.getLeavesAsync([0n]); + if (zeroLeaf !== undefined) { + throw Error("Previous Leaf should never be empty"); + } + } + } + + const definedPreviousLeaves = previousLeaves.filter(filterNonUndefined); + + definedPreviousLeaves.forEach(({ index, leaf }) => + this.setLeaf(index, leaf) + ); + treeIndizesToFetch.push( + ...definedPreviousLeaves.map(({ index }) => index) + ); + + // Additionally preload the next empty tree index. + // This is enough, because we know that all subsequent empty tree indizes + // (in case there are multiple inserts) will be bigger than that index. + // In that case, everything will be either contained in the siblings of this index + // or be zero. So in either case, we don't have to preload more than we do here. + const maximumIndex = this.leafStore.getMaximumIndex() ?? -1n; + treeIndizesToFetch.push(maximumIndex + 1n); + } + + await this.treeCache.preloadKeys(treeIndizesToFetch); + } + + public async preloadKey(path: bigint) { + await this.preloadKeysInternal([path]); + } + + public async preloadKeys(paths: bigint[]): Promise { + await this.preloadKeysInternal(paths); + } + + // This merges the cache into the parent tree and resets the cache, but not the + // in-memory merkle tree. 
+ public async mergeIntoParent(): Promise { + const leaves = this.getWrittenLeaves(); + // In case no state got set we can skip this step + if (leaves.length === 0) { + return; + } + + await this.parent.openTransaction(); + + this.parent.writeLeaves(Object.values(leaves)); + + await this.parent.commit(); + + await this.treeCache.mergeIntoParent(); + + this.resetWrittenLeaves(); + } + + public getPreviousLeaf(path: bigint) { + return this.leafStore.getPreviousLeaf(path); + } + + public getMaximumIndex() { + return this.leafStore.getMaximumIndex(); + } +} diff --git a/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts b/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts index 677668abd..fb819b27d 100644 --- a/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts +++ b/packages/sequencer/src/state/merkle/CachedMerkleTreeStore.ts @@ -154,6 +154,11 @@ export class CachedMerkleTreeStore const toFetch: MerkleTreeNodeQuery[] = []; + // TODO Replace this logic with the following: + // Collect toFetch as { nodeQuery, arrayIndex } + // After fetching, set the data at the specific arrayIndizes + // Same goes for CachedLinkedLeafStore + nodes.forEach((node, index) => { const localResult = this.getNode(node.key, node.level); if (localResult !== undefined) { diff --git a/packages/sequencer/src/state/merkle/SyncCachedLinkedLeafStore.ts b/packages/sequencer/src/state/merkle/SyncCachedLinkedLeafStore.ts new file mode 100644 index 000000000..846d7fb03 --- /dev/null +++ b/packages/sequencer/src/state/merkle/SyncCachedLinkedLeafStore.ts @@ -0,0 +1,65 @@ +import { + LinkedLeaf, + InMemoryLinkedLeafStore, + LinkedLeafStore, + StoredLeaf, +} from "@proto-kit/common"; + +import { CachedLinkedLeafStore } from "../lmt/CachedLinkedLeafStore"; + +import { SyncCachedMerkleTreeStore } from "./SyncCachedMerkleTreeStore"; + +// This is mainly used for supporting the rollbacks we need to do in case a runtimemethod fails +// In this case everything should be preloaded in 
the parent async service +export class SyncCachedLinkedLeafStore implements LinkedLeafStore { + private readonly leafStore = new InMemoryLinkedLeafStore(); + + private readonly treeCache: SyncCachedMerkleTreeStore; + + public constructor(private readonly parent: CachedLinkedLeafStore) { + this.treeCache = new SyncCachedMerkleTreeStore(parent.treeStore); + this.leafStore.maximumIndex = parent.getMaximumIndex(); + } + + public get treeStore() { + return this.treeCache; + } + + public getLeaf(path: bigint): StoredLeaf | undefined { + return this.leafStore.getLeaf(path) ?? this.parent.getLeaf(path); + } + + public setLeaf(index: bigint, value: LinkedLeaf) { + this.leafStore.setLeaf(index, value); + } + + public getMaximumIndex(): bigint | undefined { + return ( + this.leafStore.getMaximumIndex() ?? this.parent.getMaximumIndex() ?? 0n + ); + } + + public getPreviousLeaf(path: bigint): StoredLeaf | undefined { + return ( + this.leafStore.getPreviousLeaf(path) ?? this.parent.getPreviousLeaf(path) + ); + } + + public async preloadKeys(path: bigint[]) { + await this.parent.preloadKeys(path); + } + + public mergeIntoParent() { + if (Object.keys(this.leafStore.leaves).length === 0) { + return; + } + + Object.values(this.leafStore.leaves).forEach(({ leaf, index }) => + this.parent.setLeaf(index, leaf) + ); + + this.leafStore.leaves = {}; + + this.treeCache.mergeIntoParent(); + } +} diff --git a/packages/sequencer/src/state/merkle/SyncCachedMerkleTreeStore.ts b/packages/sequencer/src/state/merkle/SyncCachedMerkleTreeStore.ts index a2e122bca..e6965d0eb 100644 --- a/packages/sequencer/src/state/merkle/SyncCachedMerkleTreeStore.ts +++ b/packages/sequencer/src/state/merkle/SyncCachedMerkleTreeStore.ts @@ -1,8 +1,4 @@ -import { - InMemoryMerkleTreeStorage, - MerkleTreeStore, - RollupMerkleTree, -} from "@proto-kit/common"; +import { InMemoryMerkleTreeStorage, MerkleTreeStore } from "@proto-kit/common"; export class SyncCachedMerkleTreeStore extends InMemoryMerkleTreeStorage { public 
constructor(private readonly parent: MerkleTreeStore) { @@ -24,11 +20,13 @@ export class SyncCachedMerkleTreeStore extends InMemoryMerkleTreeStorage { const { nodes } = this; - Array.from({ length: RollupMerkleTree.HEIGHT }).forEach((ignored, level) => - Object.entries(nodes[level]).forEach((entry) => { - this.parent.setNode(BigInt(entry[0]), level, entry[1]); - }) - ); + Object.keys(nodes) + .map((level) => parseInt(level, 10)) + .forEach((level) => + Object.entries(nodes[level]).forEach((entry) => { + this.parent.setNode(BigInt(entry[0]), level, entry[1]); + }) + ); this.nodes = {}; } diff --git a/packages/sequencer/src/storage/StorageDependencyFactory.ts b/packages/sequencer/src/storage/StorageDependencyFactory.ts index ed660d8fa..e53d3b0c9 100644 --- a/packages/sequencer/src/storage/StorageDependencyFactory.ts +++ b/packages/sequencer/src/storage/StorageDependencyFactory.ts @@ -5,6 +5,7 @@ import { } from "@proto-kit/common"; import { AsyncStateService } from "../state/async/AsyncStateService"; +import { AsyncLinkedLeafStore } from "../state/async/AsyncLinkedLeafStore"; import { AsyncMerkleTreeStore } from "../state/async/AsyncMerkleTreeStore"; import { BatchStorage } from "./repositories/BatchStorage"; @@ -15,12 +16,14 @@ import { TransactionStorage } from "./repositories/TransactionStorage"; export interface StorageDependencyMinimumDependencies extends DependencyRecord { asyncStateService: DependencyDeclaration; - asyncMerkleStore: DependencyDeclaration; + asyncLinkedLeafStore: DependencyDeclaration; + + unprovenStateService: DependencyDeclaration; + unprovenLinkedLeafStore: DependencyDeclaration; + batchStorage: DependencyDeclaration; blockQueue: DependencyDeclaration; blockStorage: DependencyDeclaration; - unprovenStateService: DependencyDeclaration; - unprovenMerkleStore: DependencyDeclaration; blockTreeStore: DependencyDeclaration; messageStorage: DependencyDeclaration; settlementStorage: DependencyDeclaration; diff --git 
a/packages/sequencer/src/storage/inmemory/InMemoryAsyncLinkedLeafStore.ts b/packages/sequencer/src/storage/inmemory/InMemoryAsyncLinkedLeafStore.ts new file mode 100644 index 000000000..382e2ca7a --- /dev/null +++ b/packages/sequencer/src/storage/inmemory/InMemoryAsyncLinkedLeafStore.ts @@ -0,0 +1,70 @@ +import { InMemoryLinkedLeafStore, LinkedLeaf, noop } from "@proto-kit/common"; + +import { AsyncLinkedLeafStore } from "../../state/async/AsyncLinkedLeafStore"; + +import { InMemoryAsyncMerkleTreeStore } from "./InMemoryAsyncMerkleTreeStore"; + +export class InMemoryAsyncLinkedLeafStore implements AsyncLinkedLeafStore { + private readonly leafStore = new InMemoryLinkedLeafStore(); + + private readonly nodeStore = new InMemoryAsyncMerkleTreeStore(); + + // public constructor() { + // const initialLeaf = initialLinkedLeaf(); + // this.leafStore.setLeaf(0n, initialLeaf); + // this.nodeStore.writeNodes([{ }]); + // } + + public get treeStore() { + return this.nodeStore; + } + + public async openTransaction(): Promise { + noop(); + } + + public async commit(): Promise { + noop(); + } + + // This is using the index/key + public writeLeaves(leaves: { leaf: LinkedLeaf; index: bigint }[]) { + leaves.forEach(({ leaf, index }) => { + this.leafStore.setLeaf(index, leaf); + }); + } + + public async getLeavesAsync(paths: bigint[]) { + return paths.map((path) => { + const leaf = this.leafStore.getLeaf(path); + if (leaf !== undefined) { + return leaf; + } + return undefined; + }); + } + + public async getMaximumIndexAsync() { + return this.leafStore.getMaximumIndex(); + } + + public async getPreviousLeavesAsync(paths: bigint[]) { + return paths.map((path) => this.leafStore.getPreviousLeaf(path)); + } + + public setLeaf(index: bigint, value: LinkedLeaf) { + this.leafStore.setLeaf(index, value); + } + + public getLeaf(path: bigint) { + return this.leafStore.getLeaf(path); + } + + public getPreviousLeaf(path: bigint) { + return this.leafStore.getPreviousLeaf(path); + } + + public 
getMaximumIndex() { + return this.leafStore.getMaximumIndex(); + } +} diff --git a/packages/sequencer/src/storage/inmemory/InMemoryDatabase.ts b/packages/sequencer/src/storage/inmemory/InMemoryDatabase.ts index a8eb330ab..5406d539f 100644 --- a/packages/sequencer/src/storage/inmemory/InMemoryDatabase.ts +++ b/packages/sequencer/src/storage/inmemory/InMemoryDatabase.ts @@ -10,19 +10,20 @@ import { Database } from "../Database"; import { closeable } from "../../sequencer/builder/Closeable"; import { InMemoryBlockStorage } from "./InMemoryBlockStorage"; -import { InMemoryAsyncMerkleTreeStore } from "./InMemoryAsyncMerkleTreeStore"; +import { InMemoryAsyncLinkedLeafStore } from "./InMemoryAsyncLinkedLeafStore"; import { InMemoryBatchStorage } from "./InMemoryBatchStorage"; import { InMemoryMessageStorage } from "./InMemoryMessageStorage"; import { InMemorySettlementStorage } from "./InMemorySettlementStorage"; import { InMemoryTransactionStorage } from "./InMemoryTransactionStorage"; +import { InMemoryAsyncMerkleTreeStore } from "./InMemoryAsyncMerkleTreeStore"; @sequencerModule() @closeable() export class InMemoryDatabase extends SequencerModule implements Database { public dependencies(): StorageDependencyMinimumDependencies { return { - asyncMerkleStore: { - useClass: InMemoryAsyncMerkleTreeStore, + asyncLinkedLeafStore: { + useClass: InMemoryAsyncLinkedLeafStore, }, asyncStateService: { useFactory: () => new CachedStateService(undefined), @@ -39,8 +40,8 @@ export class InMemoryDatabase extends SequencerModule implements Database { unprovenStateService: { useFactory: () => new CachedStateService(undefined), }, - unprovenMerkleStore: { - useClass: InMemoryAsyncMerkleTreeStore, + unprovenLinkedLeafStore: { + useClass: InMemoryAsyncLinkedLeafStore, }, blockTreeStore: { useClass: InMemoryAsyncMerkleTreeStore, diff --git a/packages/sequencer/src/storage/model/Block.ts b/packages/sequencer/src/storage/model/Block.ts index 823b3c8c6..b16e8d748 100644 --- 
a/packages/sequencer/src/storage/model/Block.ts +++ b/packages/sequencer/src/storage/model/Block.ts @@ -5,7 +5,7 @@ import { BlockHashMerkleTreeWitness, NetworkState, } from "@proto-kit/protocol"; -import { RollupMerkleTree } from "@proto-kit/common"; +import { LinkedMerkleTree } from "@proto-kit/common"; import { PendingTransaction } from "../../mempool/PendingTransaction"; import { UntypedStateTransition } from "../../protocol/production/helpers/UntypedStateTransition"; @@ -105,7 +105,7 @@ export const BlockWithResult = { }, fromBlockHashRoot: Field(BlockHashMerkleTree.EMPTY_ROOT), fromMessagesHash: Field(0), - fromStateRoot: Field(RollupMerkleTree.EMPTY_ROOT), + fromStateRoot: LinkedMerkleTree.EMPTY_ROOT, toMessagesHash: ACTIONS_EMPTY_HASH, beforeBlockStateTransitions: [], @@ -113,12 +113,12 @@ export const BlockWithResult = { }, result: { afterNetworkState: NetworkState.empty(), - stateRoot: RollupMerkleTree.EMPTY_ROOT, + stateRoot: LinkedMerkleTree.EMPTY_ROOT.toBigInt(), blockHashRoot: BlockHashMerkleTree.EMPTY_ROOT, afterBlockStateTransitions: [], blockHashWitness: BlockHashMerkleTree.WITNESS.dummy(), blockHash: 0n, - witnessedRoots: [RollupMerkleTree.EMPTY_ROOT], + witnessedRoots: [LinkedMerkleTree.EMPTY_ROOT.toBigInt()], }, }) satisfies BlockWithResult, }; diff --git a/packages/sequencer/test/LinkedMerkleTreeIntegrity.ts b/packages/sequencer/test/LinkedMerkleTreeIntegrity.ts new file mode 100644 index 000000000..c19a6f5d8 --- /dev/null +++ b/packages/sequencer/test/LinkedMerkleTreeIntegrity.ts @@ -0,0 +1,46 @@ +import { Field } from "o1js"; +import { LinkedLeafStruct, log } from "@proto-kit/common"; + +import { AsyncLinkedLeafStore } from "../src/state/async/AsyncLinkedLeafStore"; + +export namespace LinkedMerkleTreeIntegrity { + export async function checkIntegrity(store: AsyncLinkedLeafStore) { + log.info("Checking tree integrity..."); + + let currentPath = 0n; + const maxPath = Field.ORDER - 1n; + + while (currentPath < maxPath) { + const leaves = await 
store.getLeavesAsync([currentPath]); + if (leaves.length === 0 || leaves[0] === undefined) { + return false; + } + + const leaf = leaves[0]!; + + const treeValues = await store.treeStore.getNodesAsync([ + { level: 0, key: leaf.index }, + ]); + if (treeValues.length === 0 || treeValues[0] === undefined) { + return false; + } + + const treeValue = treeValues[0]; + const leafHash = new LinkedLeafStruct( + LinkedLeafStruct.fromValue(leaf.leaf) + ) + .hash() + .toBigInt(); + + if (treeValue !== leafHash) { + return false; + } + if (leaf.leaf.nextPath <= currentPath) { + return false; + } + + currentPath = leaf.leaf.nextPath; + } + return true; + } +} diff --git a/packages/sequencer/test/integration/BlockProduction-test.ts b/packages/sequencer/test/integration/BlockProduction-test.ts new file mode 100644 index 000000000..537d8e322 --- /dev/null +++ b/packages/sequencer/test/integration/BlockProduction-test.ts @@ -0,0 +1,756 @@ +import { + log, + range, + MOCK_PROOF, + expectDefined, + mapSequential, + TypedClass, +} from "@proto-kit/common"; +import { VanillaProtocolModules } from "@proto-kit/library"; +import { + Runtime, + runtimeMethod, + RuntimeModule, + runtimeModule, + RuntimeEvents, +} from "@proto-kit/module"; +import { + AccountState, + MandatoryProtocolModulesRecord, + Path, + Protocol, + PROTOKIT_PREFIXES, +} from "@proto-kit/protocol"; +import { AppChain } from "@proto-kit/sdk"; +import { Bool, Field, PrivateKey, PublicKey, Struct, UInt64 } from "o1js"; +import "reflect-metadata"; +import { container } from "tsyringe"; +import { afterEach } from "@jest/globals"; + +import { + BatchStorage, + HistoricalBatchStorage, + Sequencer, + SequencerModule, + VanillaTaskWorkerModules, + DatabasePruneModule, + AsyncLinkedLeafStore, +} from "../../src"; +import { + DefaultTestingSequencerModules, + testingSequencerModules, +} from "../TestingSequencer"; +import { LinkedMerkleTreeIntegrity } from "../LinkedMerkleTreeIntegrity"; + +import { Balance } from "./mocks/Balance"; 
+import { ProtocolStateTestHook } from "./mocks/ProtocolStateTestHook"; +import { NoopRuntime } from "./mocks/NoopRuntime"; +import { BlockTestService } from "./services/BlockTestService"; + +export class PrimaryTestEvent extends Struct({ + message: Bool, +}) {} + +export class SecondaryTestEvent extends Struct({ + message: Bool, +}) {} + +@runtimeModule() +class EventMaker extends RuntimeModule { + public constructor() { + super(); + } + + public events = new RuntimeEvents({ + primary: PrimaryTestEvent, + secondary: SecondaryTestEvent, + }); + + @runtimeMethod() + public async makeEvent() { + this.events.emit("primary", new PrimaryTestEvent({ message: Bool(false) })); + // Should not emit as condition is false. + this.events.emitIf( + Bool(false), + "primary", + new PrimaryTestEvent({ message: Bool(false) }) + ); + this.events.emit( + "secondary", + new SecondaryTestEvent({ message: Bool(true) }) + ); + } +} + +export function testBlockProduction< + T extends TypedClass>, +>( + database: T, + databaseConfig: T extends TypedClass + ? Module extends SequencerModule + ? 
Config + : never + : never +) { + let runtime: Runtime<{ + Balance: typeof Balance; + NoopRuntime: typeof NoopRuntime; + EventMaker: typeof EventMaker; + }>; + let sequencer: Sequencer; + + let protocol: Protocol< + MandatoryProtocolModulesRecord & { + ProtocolStateTestHook: typeof ProtocolStateTestHook; + } + >; + + let appChain: AppChain; + + let test: BlockTestService; + let linkedLeafStore: AsyncLinkedLeafStore; + + beforeEach(async () => { + const runtimeClass = Runtime.from({ + modules: { + Balance, + NoopRuntime, + EventMaker, + }, + + config: { + Balance: {}, + NoopRuntime: {}, + EventMaker: {}, + }, + }); + + const sequencerClass = Sequencer.from({ + modules: { + DatabasePruneModule, + ...testingSequencerModules({}), + Database: database, + }, + }); + + // TODO Analyze how we can get rid of the library import for mandatory modules + const protocolClass = Protocol.from({ + modules: VanillaProtocolModules.mandatoryModules({ + ProtocolStateTestHook, + }), + }); + + const app = AppChain.from({ + Runtime: runtimeClass, + Sequencer: sequencerClass, + Protocol: protocolClass, + modules: {}, + }); + + app.configure({ + Sequencer: { + DatabasePruneModule: { + pruneOnStartup: true, + }, + Database: databaseConfig, + BlockTrigger: {}, + Mempool: {}, + BatchProducerModule: {}, + BlockProducerModule: {}, + LocalTaskWorkerModule: VanillaTaskWorkerModules.defaultConfig(), + BaseLayer: {}, + TaskQueue: {}, + FeeStrategy: {}, + SequencerStartupModule: {}, + }, + Runtime: { + Balance: {}, + NoopRuntime: {}, + EventMaker: {}, + }, + Protocol: { + AccountState: {}, + BlockProver: {}, + StateTransitionProver: {}, + BlockHeight: {}, + LastStateRoot: {}, + ProtocolStateTestHook: {}, + }, + }); + + try { + // Start AppChain + await app.start(false, container.createChildContainer()); + } catch (e) { + console.error(e); + throw e; + } + + appChain = app; + + // @ts-ignore + ({ runtime, sequencer, protocol } = app); + + test = 
app.sequencer.dependencyContainer.resolve(BlockTestService); + + linkedLeafStore = + app.sequencer.dependencyContainer.resolve( + "UnprovenLinkedLeafStore" + ); + }); + + afterEach(async () => { + await appChain.close(); + }); + + it("should produce a dummy block proof", async () => { + log.setLevel("DEBUG"); + expect.assertions(27); + + const privateKey = PrivateKey.random(); + const publicKey = privateKey.toPublicKey(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey, + args: [publicKey, UInt64.from(100), Bool(true)], + }); + + // let [block, batch] = await blockTrigger.produceBlockAndBatch(); + let block = await test.produceBlock(); + + expect(block).toBeDefined(); + + expect(block!.transactions).toHaveLength(1); + expect(block!.transactions[0].status.toBoolean()).toBe(true); + expect(block!.transactions[0].statusMessage).toBeUndefined(); + + expect(block!.transactions[0].stateTransitions).toHaveLength(3); + expect( + block!.transactions[0].stateTransitions[0].stateTransitions + ).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[1].stateTransitions + ).toHaveLength(1); + + const latestBlockWithResult = await sequencer + .resolve("BlockQueue") + .getLatestBlockAndResult(); + + let batch = await test.produceBatch(); + + expect(batch).toBeDefined(); + + expect(batch!.blockHashes).toHaveLength(1); + expect(batch!.proof.proof).toBe(MOCK_PROOF); + + expectDefined(latestBlockWithResult); + expectDefined(latestBlockWithResult.result); + expect( + latestBlockWithResult.result.afterNetworkState.hash().toString() + ).toStrictEqual(batch!.toNetworkState.hash().toString()); + + // Check if the batchstorage has received the block + const batchStorage = sequencer.resolve("BatchStorage") as BatchStorage & + HistoricalBatchStorage; + const retrievedBatch = await batchStorage.getBatchAt(0); + expect(retrievedBatch).toBeDefined(); + + const balanceModule = runtime.resolve("Balance"); + const balancesPath = Path.fromKey( + 
balanceModule.balances.path!, + balanceModule.balances.keyType, + publicKey + ); + // TODO + // const newState = await test.getState(balancesPath, "batch"); + const newUnprovenState = await test.getState(balancesPath, "block"); + + // expect(newState).toBeDefined(); + expect(newUnprovenState).toBeDefined(); + // expect(UInt64.fromFields(newState!).toString()).toStrictEqual("100"); + expect(UInt64.fromFields(newUnprovenState!).toString()).toStrictEqual( + "100" + ); + + // Check that nonce has been set + const accountModule = protocol.resolve("AccountState"); + const accountStatePath = Path.fromKey( + accountModule.accountState.path!, + accountModule.accountState.keyType, + publicKey + ); + const newAccountState = await test.getState(accountStatePath, "block"); + + expect(newAccountState).toBeDefined(); + expect(AccountState.fromFields(newAccountState!).nonce.toBigInt()).toBe(1n); + + // Second tx + await test.addTransaction({ + method: ["Balance", "addBalanceToSelf"], + privateKey, + args: [UInt64.from(100), UInt64.from(1)], + }); + + log.info("Starting second block"); + + [block, batch] = await test.produceBlockAndBatch(); + + expect(block).toBeDefined(); + + expect(block!.transactions).toHaveLength(1); + expect(block!.transactions[0].status.toBoolean()).toBe(true); + expect(block!.transactions[0].statusMessage).toBeUndefined(); + + expect(batch!.blockHashes).toHaveLength(1); + expect(batch!.proof.proof).toBe(MOCK_PROOF); + + const state2 = await test.getState(balancesPath, "block"); + + expect(state2).toBeDefined(); + expect(UInt64.fromFields(state2!)).toStrictEqual(UInt64.from(200)); + + await expect( + LinkedMerkleTreeIntegrity.checkIntegrity(linkedLeafStore) + ).resolves.toBe(true); + }, 60_000); + + it("should reject tx and not apply the state", async () => { + expect.assertions(6); + + const privateKey = PrivateKey.random(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey, + args: [PrivateKey.random().toPublicKey(), 
UInt64.from(100), Bool(false)], + }); + + const [block] = await test.produceBlockAndBatch(); + + expect(block?.transactions).toHaveLength(1); + expect(block?.transactions[0].status.toBoolean()).toBe(false); + expect(block?.transactions[0].statusMessage).toBe("Condition not met"); + + const balanceModule = runtime.resolve("Balance"); + const balancesPath = Path.fromKey( + balanceModule.balances.path!, + balanceModule.balances.keyType, + PublicKey.empty() + ); + const unprovenState = await test.getState(balancesPath, "block"); + const newState = await test.getState(balancesPath, "batch"); + + // Assert that state is not set + expect(unprovenState).toBeUndefined(); + expect(newState).toBeUndefined(); + + await expect( + LinkedMerkleTreeIntegrity.checkIntegrity(linkedLeafStore) + ).resolves.toBe(true); + }, 30_000); + + it("should produce txs in non-consecutive blocks", async () => { + const privateKey = PrivateKey.random(); + const publicKey = privateKey.toPublicKey(); + + const privateKey2 = PrivateKey.random(); + const publicKey2 = privateKey2.toPublicKey(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey, + args: [publicKey, UInt64.from(100), Bool(true)], + }); + + // let [block, batch] = await blockTrigger.produceBlockAndBatch(); + const block = await test.produceBlock(); + + expect(block).toBeDefined(); + + expect(block!.transactions).toHaveLength(1); + expect(block!.transactions[0].status.toBoolean()).toBe(true); + expect(block!.transactions[0].statusMessage).toBeUndefined(); + + expect( + block!.transactions[0].stateTransitions[0].stateTransitions + ).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[1].stateTransitions + ).toHaveLength(1); + + await test.produceBlock(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey: privateKey2, + args: [publicKey2, UInt64.from(100), Bool(true)], + }); + await test.produceBlock(); + + await test.addTransaction({ + method: ["Balance", 
"setBalanceIf"], + privateKey: privateKey2, + args: [publicKey2, UInt64.from(100), Bool(true)], + }); + + await test.produceBlock(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey: privateKey2, + args: [publicKey2, UInt64.from(100), Bool(true)], + }); + + await test.produceBlock(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey: privateKey2, + args: [publicKey2, UInt64.from(100), Bool(true)], + }); + await test.produceBlock(); + + // Second tx + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey, + args: [publicKey, UInt64.from(100), Bool(true)], + }); + + log.info("Starting second block"); + + const block2 = await test.produceBlock(); + + expect(block2).toBeDefined(); + + expect(block2!.transactions).toHaveLength(1); + expect(block2!.transactions[0].status.toBoolean()).toBe(true); + expect(block2!.transactions[0].statusMessage).toBeUndefined(); + + await expect( + LinkedMerkleTreeIntegrity.checkIntegrity(linkedLeafStore) + ).resolves.toBe(true); + }, 60_000); + + const numberTxs = 3; + + it("should produce block with multiple transaction", async () => { + log.setLevel("TRACE"); + + expect.assertions(6 + 4 * numberTxs); + + const privateKey = PrivateKey.random(); + const publicKey = privateKey.toPublicKey(); + + const increment = 100; + + await mapSequential(range(0, numberTxs), async (index) => { + await test.addTransaction({ + method: ["Balance", "addBalanceToSelf"], + privateKey, + args: [UInt64.from(increment), UInt64.from(0)], + }); + }); + + const block = await test.produceBlock(); + + expect(block).toBeDefined(); + expect(block!.transactions).toHaveLength(numberTxs); + + range(0, numberTxs).forEach((index) => { + expect(block!.transactions[index].status.toBoolean()).toBe(true); + expect(block!.transactions[index].statusMessage).toBe(undefined); + + const transitions = + block!.transactions[index].stateTransitions[1].stateTransitions; + + const fromBalance = 
increment * index; + expect(transitions[0].fromValue.value[0].toBigInt()).toStrictEqual( + BigInt(fromBalance) + ); + expect(transitions[0].toValue.value[0].toBigInt()).toStrictEqual( + BigInt(fromBalance + increment) + ); + }); + + const batch = await test.produceBatch(); + + expect(batch!.blockHashes).toHaveLength(1); + expect(batch!.proof.proof).toBe(MOCK_PROOF); + + const balanceModule = runtime.resolve("Balance"); + const balancesPath = Path.fromKey( + balanceModule.balances.path!, + balanceModule.balances.keyType, + publicKey + ); + const newState = await test.getState(balancesPath, "block"); + + expect(newState).toBeDefined(); + expect(UInt64.fromFields(newState!)).toStrictEqual( + UInt64.from(100 * numberTxs) + ); + }, 160_000); + + it("should produce a block with a mix of failing and succeeding transactions and empty blocks", async () => { + expect.assertions(7); + + log.setLevel("TRACE"); + + const pk1 = PrivateKey.random(); + const pk2 = PrivateKey.random(); + + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey: pk1, + args: [pk1.toPublicKey(), UInt64.from(100), Bool(false)], + }); + await test.addTransaction({ + method: ["Balance", "setBalanceIf"], + privateKey: pk2, + args: [pk2.toPublicKey(), UInt64.from(100), Bool(true)], + }); + + const block = await test.produceBlock(); + await test.produceBlock(); + const batch = await test.produceBatch(); + + console.log("Pt1"); + + expect(block).toBeDefined(); + + expect(batch!.blockHashes).toHaveLength(2); + expect(block!.transactions).toHaveLength(2); + + const balanceModule = runtime.resolve("Balance"); + const balancesPath1 = Path.fromKey( + balanceModule.balances.path!, + balanceModule.balances.keyType, + pk1.toPublicKey() + ); + const newState1 = await test.getState(balancesPath1, "block"); + + expect(newState1).toBeUndefined(); + + const balancesPath2 = Path.fromKey( + balanceModule.balances.path!, + balanceModule.balances.keyType, + pk2.toPublicKey() + ); + const newState2 = 
await test.getState(balancesPath2, "block"); + + expect(newState2).toBeDefined(); + expect(UInt64.fromFields(newState2!)).toStrictEqual(UInt64.from(100)); + + await test.produceBlock(); + await test.produceBlock(); + const proven2 = await test.produceBatch(); + + expect(proven2?.blockHashes.length).toBe(2); + }, 720_000); + + // TODO Test with batch that only consists of empty blocks + + it.each([ + [2, 1, 1], + [1, 2, 1], + [1, 1, 2], + [2, 2, 2], + [1, 14, 0], + ])( + "should produce multiple blocks with multiple batches with multiple transactions", + async (batches, blocksPerBatch, txsPerBlock) => { + expect.assertions( + 2 * batches + + 1 * batches * blocksPerBatch + + 2 * batches * blocksPerBatch * txsPerBlock + ); + + log.setLevel("DEBUG"); + + const sender = PrivateKey.random(); + + const keys = range(0, batches * blocksPerBatch * txsPerBlock).map(() => + PrivateKey.random() + ); + + const increment = 100; + + let iterationIndex = 0; + + for (let i = 0; i < batches; i++) { + for (let j = 0; j < blocksPerBatch; j++) { + for (let k = 0; k < txsPerBlock; k++) { + await test.addTransaction({ + method: ["Balance", "addBalance"], + privateKey: sender, + args: [ + keys[iterationIndex].toPublicKey(), + UInt64.from(increment * (iterationIndex + 1)), + ], + }); + + iterationIndex += 1; + } + + // Produce block + const block = await test.produceBlock(); + + expect(block).toBeDefined(); + + for (let k = 0; k < txsPerBlock; k++) { + expect(block!.transactions).toHaveLength(txsPerBlock); + expect(block!.transactions[0].status.toBoolean()).toBe(true); + } + } + + const batch = await test.produceBatch(); + + expect(batch).toBeDefined(); + expect(batch!.blockHashes).toHaveLength(blocksPerBatch); + } + }, + 500_000 + ); + + it("should produce block with a tx with a lot of STs", async () => { + expect.assertions(11); + + const privateKey = PrivateKey.random(); + + const field = Field(100); + + await test.addTransaction({ + method: ["Balance", "lotOfSTs"], + privateKey, + args: 
[field], + }); + + const [block, batch] = await test.produceBlockAndBatch(); + + expect(block).toBeDefined(); + expect(batch).toBeDefined(); + + expect(block!.transactions).toHaveLength(1); + + expect(block!.transactions[0].status.toBoolean()).toBe(true); + expect(block!.transactions[0].statusMessage).toBe(undefined); + + expect(batch!.blockHashes).toHaveLength(1); + expect(batch!.proof.proof).toBe(MOCK_PROOF); + + const supplyPath = Path.fromProperty( + "Balance", + "totalSupply", + PROTOKIT_PREFIXES.STATE_RUNTIME + ); + const newState = await test.getState(supplyPath, "block"); + + expect(newState).toBeDefined(); + expect(UInt64.fromFields(newState!)).toStrictEqual( + // 10 is the number of iterations inside the runtime method + UInt64.from(100 * 10) + ); + + const pk2 = PublicKey.from({ x: field.add(Field(2)), isOdd: Bool(false) }); + const balanceModule = runtime.resolve("Balance"); + const balancesPath = Path.fromKey( + balanceModule.balances.path!, + balanceModule.balances.keyType, + pk2 + ); + + const newBalance = await test.getState(balancesPath, "block"); + + expect(newBalance).toBeDefined(); + expect(UInt64.fromFields(newBalance!)).toStrictEqual(UInt64.from(200)); + }, 360_000); + + it("regression - should produce block with no STs emitted", async () => { + const privateKey = PrivateKey.random(); + + await test.addTransaction({ + method: ["NoopRuntime", "emittingNoSTs"], + privateKey, + args: [], + }); + + const block = await test.produceBlock(); + + expect(block).toBeDefined(); + + expect(block!.transactions).toHaveLength(1); + expect(block!.transactions[0].status.toBoolean()).toBe(true); + expect(block!.transactions[0].statusMessage).toBeUndefined(); + + expect( + block!.transactions[0].stateTransitions[0].stateTransitions + ).toHaveLength(2); + expect( + block!.transactions[0].stateTransitions[1].stateTransitions + ).toHaveLength(0); + + const batch = await test.produceBatch(); + + expect(batch).toBeDefined(); + + 
expect(batch!.blockHashes).toHaveLength(1); + expect(batch!.proof.proof).toBe(MOCK_PROOF); + }, 30000); + + it("events - should produce block with the right events", async () => { + log.setLevel("TRACE"); + + const privateKey = PrivateKey.random(); + + await test.addTransaction({ + method: ["EventMaker", "makeEvent"], + privateKey, + args: [], + }); + + const firstExpectedEvent = { + eventType: PrimaryTestEvent, + event: new PrimaryTestEvent({ + message: Bool(false), + }), + eventName: "primary", + }; + + const secondExpectedEvent = { + eventType: SecondaryTestEvent, + event: new SecondaryTestEvent({ + message: Bool(true), + }), + eventName: "secondary", + }; + const firstEventReduced = { + eventName: firstExpectedEvent.eventName, + data: firstExpectedEvent.eventType.toFields(firstExpectedEvent.event), + source: "runtime", + }; + + const secondEventReduced = { + eventName: secondExpectedEvent.eventName, + data: secondExpectedEvent.eventType.toFields(secondExpectedEvent.event), + source: "runtime", + }; + + const block = await test.produceBlock(); + + expect(block).toBeDefined(); + + expect(block!.transactions).toHaveLength(1); + expect(block!.transactions[0].events).toHaveLength(2); + expect(block!.transactions[0].events[0]).toStrictEqual(firstEventReduced); + expect(block!.transactions[0].events[1]).toStrictEqual(secondEventReduced); + + const batch = await test.produceBatch(); + + expect(batch).toBeDefined(); + + expect(batch!.blockHashes).toHaveLength(1); + expect(batch!.proof.proof).toBe(MOCK_PROOF); + }, 30000); +} diff --git a/packages/sequencer/test/integration/BlockProduction.test.ts b/packages/sequencer/test/integration/BlockProduction.test.ts index 049b5d223..2ff7920dc 100644 --- a/packages/sequencer/test/integration/BlockProduction.test.ts +++ b/packages/sequencer/test/integration/BlockProduction.test.ts @@ -1,714 +1,9 @@ -import { - log, - range, - MOCK_PROOF, - expectDefined, - mapSequential, -} from "@proto-kit/common"; -import { 
VanillaProtocolModules } from "@proto-kit/library"; -import { - Runtime, - runtimeMethod, - RuntimeModule, - runtimeModule, - RuntimeEvents, -} from "@proto-kit/module"; -import { - AccountState, - MandatoryProtocolModulesRecord, - Path, - Protocol, - PROTOKIT_PREFIXES, -} from "@proto-kit/protocol"; -import { AppChain } from "@proto-kit/sdk"; -import { Bool, Field, PrivateKey, PublicKey, Struct, UInt64 } from "o1js"; import "reflect-metadata"; -import { container } from "tsyringe"; -import { - BatchStorage, - HistoricalBatchStorage, - Sequencer, - VanillaTaskWorkerModules, -} from "../../src"; -import { - DefaultTestingSequencerModules, - testingSequencerModules, -} from "../TestingSequencer"; +import { InMemoryDatabase } from "../../src"; -import { Balance } from "./mocks/Balance"; -import { ProtocolStateTestHook } from "./mocks/ProtocolStateTestHook"; -import { NoopRuntime } from "./mocks/NoopRuntime"; -import { BlockTestService } from "./services/BlockTestService"; - -export class PrimaryTestEvent extends Struct({ - message: Bool, -}) {} - -export class SecondaryTestEvent extends Struct({ - message: Bool, -}) {} - -@runtimeModule() -class EventMaker extends RuntimeModule { - public constructor() { - super(); - } - - public events = new RuntimeEvents({ - primary: PrimaryTestEvent, - secondary: SecondaryTestEvent, - }); - - @runtimeMethod() - public async makeEvent() { - this.events.emit("primary", new PrimaryTestEvent({ message: Bool(false) })); - // Should not emit as condition is false. 
- this.events.emitIf( - Bool(false), - "primary", - new PrimaryTestEvent({ message: Bool(false) }) - ); - this.events.emit( - "secondary", - new SecondaryTestEvent({ message: Bool(true) }) - ); - } -} +import { testBlockProduction } from "./BlockProduction-test"; describe("block production", () => { - let runtime: Runtime<{ - Balance: typeof Balance; - NoopRuntime: typeof NoopRuntime; - EventMaker: typeof EventMaker; - }>; - let sequencer: Sequencer; - - let protocol: Protocol< - MandatoryProtocolModulesRecord & { - ProtocolStateTestHook: typeof ProtocolStateTestHook; - } - >; - // let protocol: Protocol; - - // eslint-disable-next-line @typescript-eslint/no-unused-vars - let appChain: AppChain; - - let test: BlockTestService; - - beforeEach(async () => { - const runtimeClass = Runtime.from({ - modules: { - Balance, - NoopRuntime, - EventMaker, - }, - - config: { - Balance: {}, - NoopRuntime: {}, - EventMaker: {}, - }, - }); - - const sequencerClass = Sequencer.from({ - modules: testingSequencerModules({}), - }); - - // TODO Analyze how we can get rid of the library import for mandatory modules - const protocolClass = Protocol.from({ - modules: VanillaProtocolModules.mandatoryModules({ - ProtocolStateTestHook, - }), - // modules: VanillaProtocolModules.with({}), - }); - - const app = AppChain.from({ - Runtime: runtimeClass, - Sequencer: sequencerClass, - Protocol: protocolClass, - modules: {}, - }); - - app.configure({ - Sequencer: { - Database: {}, - BlockTrigger: {}, - Mempool: {}, - BatchProducerModule: {}, - BlockProducerModule: {}, - LocalTaskWorkerModule: VanillaTaskWorkerModules.defaultConfig(), - BaseLayer: {}, - TaskQueue: {}, - FeeStrategy: {}, - SequencerStartupModule: {}, - }, - Runtime: { - Balance: {}, - NoopRuntime: {}, - EventMaker: {}, - }, - Protocol: { - AccountState: {}, - BlockProver: {}, - StateTransitionProver: {}, - BlockHeight: {}, - LastStateRoot: {}, - ProtocolStateTestHook: {}, - }, - }); - - try { - // Start AppChain - await 
app.start(false, container.createChildContainer()); - } catch (e) { - console.error(e); - throw e; - } - - appChain = app; - - // @ts-ignore - ({ runtime, sequencer, protocol } = app); - - test = app.sequencer.dependencyContainer.resolve(BlockTestService); - }); - - it("should produce a dummy block proof", async () => { - expect.assertions(26); - - const privateKey = PrivateKey.random(); - const publicKey = privateKey.toPublicKey(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey, - args: [publicKey, UInt64.from(100), Bool(true)], - }); - - // let [block, batch] = await blockTrigger.produceBlockAndBatch(); - let block = await test.produceBlock(); - - expect(block).toBeDefined(); - - expect(block!.transactions).toHaveLength(1); - expect(block!.transactions[0].status.toBoolean()).toBe(true); - expect(block!.transactions[0].statusMessage).toBeUndefined(); - - expect(block!.transactions[0].stateTransitions).toHaveLength(3); - expect( - block!.transactions[0].stateTransitions[0].stateTransitions - ).toHaveLength(2); - expect( - block!.transactions[0].stateTransitions[1].stateTransitions - ).toHaveLength(1); - - const latestBlockWithResult = await sequencer - .resolve("BlockQueue") - .getLatestBlockAndResult(); - - let batch = await test.produceBatch(); - - expect(batch).toBeDefined(); - - expect(batch!.blockHashes).toHaveLength(1); - expect(batch!.proof.proof).toBe(MOCK_PROOF); - - expectDefined(latestBlockWithResult); - expectDefined(latestBlockWithResult.result); - expect( - latestBlockWithResult.result.afterNetworkState.hash().toString() - ).toStrictEqual(batch!.toNetworkState.hash().toString()); - - // Check if the batchstorage has received the block - const batchStorage = sequencer.resolve("BatchStorage") as BatchStorage & - HistoricalBatchStorage; - const retrievedBatch = await batchStorage.getBatchAt(0); - expect(retrievedBatch).toBeDefined(); - - const balanceModule = runtime.resolve("Balance"); - const balancesPath = 
Path.fromKey( - balanceModule.balances.path!, - balanceModule.balances.keyType, - publicKey - ); - // TODO - // const newState = await test.getState(balancesPath, "batch"); - const newUnprovenState = await test.getState(balancesPath, "block"); - - // expect(newState).toBeDefined(); - expect(newUnprovenState).toBeDefined(); - // expect(UInt64.fromFields(newState!).toString()).toStrictEqual("100"); - expect(UInt64.fromFields(newUnprovenState!).toString()).toStrictEqual( - "100" - ); - - // Check that nonce has been set - const accountModule = protocol.resolve("AccountState"); - const accountStatePath = Path.fromKey( - accountModule.accountState.path!, - accountModule.accountState.keyType, - publicKey - ); - const newAccountState = await test.getState(accountStatePath, "block"); - - expect(newAccountState).toBeDefined(); - expect(AccountState.fromFields(newAccountState!).nonce.toBigInt()).toBe(1n); - - // Second tx - await test.addTransaction({ - method: ["Balance", "addBalanceToSelf"], - privateKey, - args: [UInt64.from(100), UInt64.from(1)], - }); - - log.info("Starting second block"); - - [block, batch] = await test.produceBlockAndBatch(); - - expect(block).toBeDefined(); - - expect(block!.transactions).toHaveLength(1); - expect(block!.transactions[0].status.toBoolean()).toBe(true); - expect(block!.transactions[0].statusMessage).toBeUndefined(); - - expect(batch!.blockHashes).toHaveLength(1); - expect(batch!.proof.proof).toBe(MOCK_PROOF); - - const state2 = await test.getState(balancesPath, "block"); - - expect(state2).toBeDefined(); - expect(UInt64.fromFields(state2!)).toStrictEqual(UInt64.from(200)); - }, 60_000); - - it("should reject tx and not apply the state", async () => { - expect.assertions(5); - - const privateKey = PrivateKey.random(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey, - args: [PrivateKey.random().toPublicKey(), UInt64.from(100), Bool(false)], - }); - - const [block] = await test.produceBlockAndBatch(); 
- - expect(block?.transactions).toHaveLength(1); - expect(block?.transactions[0].status.toBoolean()).toBe(false); - expect(block?.transactions[0].statusMessage).toBe("Condition not met"); - - const balanceModule = runtime.resolve("Balance"); - const balancesPath = Path.fromKey( - balanceModule.balances.path!, - balanceModule.balances.keyType, - PublicKey.empty() - ); - const unprovenState = await test.getState(balancesPath, "block"); - const newState = await test.getState(balancesPath, "batch"); - - // Assert that state is not set - expect(unprovenState).toBeUndefined(); - expect(newState).toBeUndefined(); - }, 30_000); - - it("should produce txs in non-consecutive blocks", async () => { - const privateKey = PrivateKey.random(); - const publicKey = privateKey.toPublicKey(); - - const privateKey2 = PrivateKey.random(); - const publicKey2 = privateKey2.toPublicKey(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey, - args: [publicKey, UInt64.from(100), Bool(true)], - }); - - // let [block, batch] = await blockTrigger.produceBlockAndBatch(); - const block = await test.produceBlock(); - - expect(block).toBeDefined(); - - expect(block!.transactions).toHaveLength(1); - expect(block!.transactions[0].status.toBoolean()).toBe(true); - expect(block!.transactions[0].statusMessage).toBeUndefined(); - - expect( - block!.transactions[0].stateTransitions[0].stateTransitions - ).toHaveLength(2); - expect( - block!.transactions[0].stateTransitions[1].stateTransitions - ).toHaveLength(1); - - await test.produceBlock(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey: privateKey2, - args: [publicKey2, UInt64.from(100), Bool(true)], - }); - await test.produceBlock(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey: privateKey2, - args: [publicKey2, UInt64.from(100), Bool(true)], - }); - - await test.produceBlock(); - - await test.addTransaction({ - method: ["Balance", 
"setBalanceIf"], - privateKey: privateKey2, - args: [publicKey2, UInt64.from(100), Bool(true)], - }); - - await test.produceBlock(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey: privateKey2, - args: [publicKey2, UInt64.from(100), Bool(true)], - }); - await test.produceBlock(); - - // Second tx - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey, - args: [publicKey, UInt64.from(100), Bool(true)], - }); - - log.info("Starting second block"); - - const block2 = await test.produceBlock(); - - expect(block2).toBeDefined(); - - expect(block2!.transactions).toHaveLength(1); - expect(block2!.transactions[0].status.toBoolean()).toBe(true); - expect(block2!.transactions[0].statusMessage).toBeUndefined(); - }, 60_000); - - const numberTxs = 3; - - it("should produce block with multiple transaction", async () => { - log.setLevel("TRACE"); - - expect.assertions(6 + 4 * numberTxs); - - const privateKey = PrivateKey.random(); - const publicKey = privateKey.toPublicKey(); - - const increment = 100; - - await mapSequential(range(0, numberTxs), async (index) => { - await test.addTransaction({ - method: ["Balance", "addBalanceToSelf"], - privateKey, - args: [UInt64.from(increment), UInt64.from(0)], - }); - }); - - const block = await test.produceBlock(); - - expect(block).toBeDefined(); - expect(block!.transactions).toHaveLength(numberTxs); - - range(0, numberTxs).forEach((index) => { - expect(block!.transactions[index].status.toBoolean()).toBe(true); - expect(block!.transactions[index].statusMessage).toBe(undefined); - - const transitions = - block!.transactions[index].stateTransitions[1].stateTransitions; - - const fromBalance = increment * index; - expect(transitions[0].fromValue.value[0].toBigInt()).toStrictEqual( - BigInt(fromBalance) - ); - expect(transitions[0].toValue.value[0].toBigInt()).toStrictEqual( - BigInt(fromBalance + increment) - ); - }); - - const batch = await test.produceBatch(); - - 
expect(batch!.blockHashes).toHaveLength(1); - expect(batch!.proof.proof).toBe(MOCK_PROOF); - - const balanceModule = runtime.resolve("Balance"); - const balancesPath = Path.fromKey( - balanceModule.balances.path!, - balanceModule.balances.keyType, - publicKey - ); - const newState = await test.getState(balancesPath, "block"); - - expect(newState).toBeDefined(); - expect(UInt64.fromFields(newState!)).toStrictEqual( - UInt64.from(100 * numberTxs) - ); - }, 160_000); - - it("should produce a block with a mix of failing and succeeding transactions and empty blocks", async () => { - expect.assertions(7); - - log.setLevel("TRACE"); - - const pk1 = PrivateKey.random(); - const pk2 = PrivateKey.random(); - - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey: pk1, - args: [pk1.toPublicKey(), UInt64.from(100), Bool(false)], - }); - await test.addTransaction({ - method: ["Balance", "setBalanceIf"], - privateKey: pk2, - args: [pk2.toPublicKey(), UInt64.from(100), Bool(true)], - }); - - const block = await test.produceBlock(); - await test.produceBlock(); - const batch = await test.produceBatch(); - - console.log("Pt1"); - - expect(block).toBeDefined(); - - expect(batch!.blockHashes).toHaveLength(2); - expect(block!.transactions).toHaveLength(2); - - const balanceModule = runtime.resolve("Balance"); - const balancesPath1 = Path.fromKey( - balanceModule.balances.path!, - balanceModule.balances.keyType, - pk1.toPublicKey() - ); - const newState1 = await test.getState(balancesPath1, "block"); - - expect(newState1).toBeUndefined(); - - const balancesPath2 = Path.fromKey( - balanceModule.balances.path!, - balanceModule.balances.keyType, - pk2.toPublicKey() - ); - const newState2 = await test.getState(balancesPath2, "block"); - - expect(newState2).toBeDefined(); - expect(UInt64.fromFields(newState2!)).toStrictEqual(UInt64.from(100)); - - await test.produceBlock(); - await test.produceBlock(); - const proven2 = await test.produceBatch(); - - 
expect(proven2?.blockHashes.length).toBe(2); - }, 720_000); - - // TODO Test with batch that only consists of empty blocks - - it.each([ - [2, 1, 1], - [1, 2, 1], - [1, 1, 2], - [2, 2, 2], - [1, 14, 0], - ])( - "should produce multiple blocks with multiple batches with multiple transactions", - async (batches, blocksPerBatch, txsPerBlock) => { - expect.assertions( - 2 * batches + - 1 * batches * blocksPerBatch + - 2 * batches * blocksPerBatch * txsPerBlock - ); - - log.setLevel("DEBUG"); - - const sender = PrivateKey.random(); - - const keys = range(0, batches * blocksPerBatch * txsPerBlock).map(() => - PrivateKey.random() - ); - - const increment = 100; - - let iterationIndex = 0; - - for (let i = 0; i < batches; i++) { - for (let j = 0; j < blocksPerBatch; j++) { - for (let k = 0; k < txsPerBlock; k++) { - await test.addTransaction({ - method: ["Balance", "addBalance"], - privateKey: sender, - args: [ - keys[iterationIndex].toPublicKey(), - UInt64.from(increment * (iterationIndex + 1)), - ], - }); - - iterationIndex += 1; - } - - // Produce block - const block = await test.produceBlock(); - - expect(block).toBeDefined(); - - for (let k = 0; k < txsPerBlock; k++) { - expect(block!.transactions).toHaveLength(txsPerBlock); - expect(block!.transactions[0].status.toBoolean()).toBe(true); - } - } - - const batch = await test.produceBatch(); - - expect(batch).toBeDefined(); - expect(batch!.blockHashes).toHaveLength(blocksPerBatch); - } - }, - 500_000 - ); - - it("should produce block with a tx with a lot of STs", async () => { - expect.assertions(11); - - const privateKey = PrivateKey.random(); - - const field = Field(100); - - await test.addTransaction({ - method: ["Balance", "lotOfSTs"], - privateKey, - args: [field], - }); - - const [block, batch] = await test.produceBlockAndBatch(); - - expect(block).toBeDefined(); - expect(batch).toBeDefined(); - - expect(block!.transactions).toHaveLength(1); - - expect(block!.transactions[0].status.toBoolean()).toBe(true); - 
expect(block!.transactions[0].statusMessage).toBe(undefined); - - expect(batch!.blockHashes).toHaveLength(1); - expect(batch!.proof.proof).toBe(MOCK_PROOF); - - const supplyPath = Path.fromProperty( - "Balance", - "totalSupply", - PROTOKIT_PREFIXES.STATE_RUNTIME - ); - const newState = await test.getState(supplyPath, "block"); - - expect(newState).toBeDefined(); - expect(UInt64.fromFields(newState!)).toStrictEqual( - // 10 is the number of iterations inside the runtime method - UInt64.from(100 * 10) - ); - - const pk2 = PublicKey.from({ x: field.add(Field(2)), isOdd: Bool(false) }); - const balanceModule = runtime.resolve("Balance"); - const balancesPath = Path.fromKey( - balanceModule.balances.path!, - balanceModule.balances.keyType, - pk2 - ); - - const newBalance = await test.getState(balancesPath, "block"); - - expect(newBalance).toBeDefined(); - expect(UInt64.fromFields(newBalance!)).toStrictEqual(UInt64.from(200)); - }, 360_000); - - it("regression - should produce block with no STs emitted", async () => { - const privateKey = PrivateKey.random(); - - await test.addTransaction({ - method: ["NoopRuntime", "emittingNoSTs"], - privateKey, - args: [], - }); - - const block = await test.produceBlock(); - - expect(block).toBeDefined(); - - expect(block!.transactions).toHaveLength(1); - expect(block!.transactions[0].status.toBoolean()).toBe(true); - expect(block!.transactions[0].statusMessage).toBeUndefined(); - - expect( - block!.transactions[0].stateTransitions[0].stateTransitions - ).toHaveLength(2); - expect( - block!.transactions[0].stateTransitions[1].stateTransitions - ).toHaveLength(0); - - const batch = await test.produceBatch(); - - expect(batch).toBeDefined(); - - expect(batch!.blockHashes).toHaveLength(1); - expect(batch!.proof.proof).toBe(MOCK_PROOF); - }, 30000); - - it("events - should produce block with the right events", async () => { - log.setLevel("TRACE"); - - const privateKey = PrivateKey.random(); - - await test.addTransaction({ - method: 
["EventMaker", "makeEvent"], - privateKey, - args: [], - }); - - const firstExpectedEvent = { - eventType: PrimaryTestEvent, - event: new PrimaryTestEvent({ - message: Bool(false), - }), - eventName: "primary", - }; - - const secondExpectedEvent = { - eventType: SecondaryTestEvent, - event: new SecondaryTestEvent({ - message: Bool(true), - }), - eventName: "secondary", - }; - const firstEventReduced = { - eventName: firstExpectedEvent.eventName, - data: firstExpectedEvent.eventType.toFields(firstExpectedEvent.event), - source: "runtime", - }; - - const secondEventReduced = { - eventName: secondExpectedEvent.eventName, - data: secondExpectedEvent.eventType.toFields(secondExpectedEvent.event), - source: "runtime", - }; - - const block = await test.produceBlock(); - - expect(block).toBeDefined(); - - expect(block!.transactions).toHaveLength(1); - expect(block!.transactions[0].events).toHaveLength(2); - expect(block!.transactions[0].events[0]).toStrictEqual(firstEventReduced); - expect(block!.transactions[0].events[1]).toStrictEqual(secondEventReduced); - - const batch = await test.produceBatch(); - - expect(batch).toBeDefined(); - - expect(batch!.blockHashes).toHaveLength(1); - expect(batch!.proof.proof).toBe(MOCK_PROOF); - }, 30000); + testBlockProduction(InMemoryDatabase, {}); }); diff --git a/packages/sequencer/test/merkle/CachedLinkedMerkleStore.test.ts b/packages/sequencer/test/merkle/CachedLinkedMerkleStore.test.ts new file mode 100644 index 000000000..5ceae2768 --- /dev/null +++ b/packages/sequencer/test/merkle/CachedLinkedMerkleStore.test.ts @@ -0,0 +1,545 @@ +import { + expectDefined, + LinkedLeafStruct, + LinkedMerkleTree, +} from "@proto-kit/common"; +import { beforeEach, expect } from "@jest/globals"; +import { Field, Poseidon } from "o1js"; + +import { CachedLinkedLeafStore } from "../../src/state/lmt/CachedLinkedLeafStore"; +import { InMemoryAsyncLinkedLeafStore } from "../../src/storage/inmemory/InMemoryAsyncLinkedLeafStore"; +import { 
SyncCachedLinkedLeafStore } from "../../src/state/merkle/SyncCachedLinkedLeafStore"; + +describe("cached linked merkle store", () => { + let mainStore: InMemoryAsyncLinkedLeafStore; + + let cache1: CachedLinkedLeafStore; + let tree1: LinkedMerkleTree; + + beforeEach(async () => { + mainStore = new InMemoryAsyncLinkedLeafStore(); + + const cachedStore = await CachedLinkedLeafStore.new(mainStore); + + const tmpTree = new LinkedMerkleTree(cachedStore.treeStore, cachedStore); + tmpTree.setLeaf(5n, 10n); + + await cachedStore.mergeIntoParent(); + + cache1 = await CachedLinkedLeafStore.new(mainStore); + tree1 = new LinkedMerkleTree(cache1.treeStore, cache1); + }); + + it("should cache multiple keys correctly", async () => { + expect.assertions(11); + await cache1.preloadKeys([16n, 46n]); + tree1.setLeaf(16n, 16n); + tree1.setLeaf(46n, 46n); + + const cache2 = new SyncCachedLinkedLeafStore(cache1); + const tree2 = new LinkedMerkleTree(cache2.treeStore, cache2); + + const leaf1 = tree1.getLeaf(16n); + const leaf2 = tree1.getLeaf(46n); + + expectDefined(leaf1); + expectDefined(leaf2); + + const storedLeaf1 = cache2.getLeaf(16n); + const storedLeaf2 = cache2.getLeaf(46n); + + expectDefined(storedLeaf1); + expectDefined(storedLeaf2); + + expect(storedLeaf1.index).toStrictEqual(2n); + expect(storedLeaf2.index).toStrictEqual(3n); + + expect(tree2.tree.getNode(0, storedLeaf1.index).toBigInt()).toBe( + leaf1.hash().toBigInt() + ); + expect(tree2.tree.getNode(0, storedLeaf2.index).toBigInt()).toBe( + leaf2.hash().toBigInt() + ); + + expect(tree2.getLeaf(16n)).toEqual(leaf1); + expect(tree2.getLeaf(46n)).toEqual(leaf2); + + expect(tree2.getRoot().toString()).toStrictEqual( + tree1.getRoot().toString() + ); + }); + + it("simple test - check hash of updated node is updated", async () => { + // main store already has 0n and 5n paths defined. + // preloading 10n should load up 5n in the cache1 leaf and node stores. 
+ await cache1.preloadKeys([10n]); + + expectDefined(cache1.getLeaf(5n)); + expectDefined(cache1.treeStore.getNode(1n, 0)); + + tree1.setLeaf(10n, 10n); + await cache1.mergeIntoParent(); + + const leaf5 = tree1.getLeaf(5n); + const leaf10 = tree1.getLeaf(10n); + expectDefined(leaf5); + expectDefined(leaf10); + + const storedLeaf5 = cache1.getLeaf(5n); + const storedLeaf10 = cache1.getLeaf(10n); + + expectDefined(storedLeaf5); + expectDefined(storedLeaf10); + + expect(storedLeaf5).toStrictEqual({ + leaf: { value: 10n, path: 5n, nextPath: 10n }, + index: 1n, + }); + expect(storedLeaf10.index).toStrictEqual(2n); + + // Check leaves were hashed properly when added to nodes/merkle-tree + expect(cache1.treeStore.getNode(storedLeaf10.index, 0)).toStrictEqual( + leaf10.hash().toBigInt() + ); + expect(cache1.treeStore.getNode(storedLeaf5.index, 0)).toStrictEqual( + leaf5.hash().toBigInt() + ); + }); + + it("should preload through multiple levels and insert correctly at right index", async () => { + await cache1.preloadKeys([10n, 11n, 12n, 13n]); + + tree1.setLeaf(10n, 10n); + tree1.setLeaf(11n, 11n); + tree1.setLeaf(12n, 12n); + tree1.setLeaf(13n, 13n); + await cache1.mergeIntoParent(); + + const cache2 = new SyncCachedLinkedLeafStore(cache1); + await cache2.preloadKeys([14n]); + + const tree2 = new LinkedMerkleTree(cache2.treeStore, cache2); + tree2.setLeaf(14n, 14n); + + const leaf = tree1.getLeaf(5n); + const leaf2 = tree2.getLeaf(14n); + + expectDefined(leaf); + expectDefined(leaf2); + + const storedLeaf5 = cache2.getLeaf(5n); + const storedLeaf10 = cache2.getLeaf(10n); + const storedLeaf11 = cache2.getLeaf(11n); + const storedLeaf12 = cache2.getLeaf(12n); + const storedLeaf13 = cache2.getLeaf(13n); + const storedLeaf14 = cache2.getLeaf(14n); + + expectDefined(storedLeaf5); + expectDefined(storedLeaf10); + expectDefined(storedLeaf11); + expectDefined(storedLeaf12); + expectDefined(storedLeaf13); + expectDefined(storedLeaf14); + + 
expect(storedLeaf5.index).toStrictEqual(1n); + expect(storedLeaf10.index).toStrictEqual(2n); + expect(storedLeaf11.index).toStrictEqual(3n); + expect(storedLeaf12.index).toStrictEqual(4n); + expect(storedLeaf13.index).toStrictEqual(5n); + expect(storedLeaf14.index).toStrictEqual(6n); + + // Check leaves were hashed properly when added to nodes/merkle-tree + expect(cache1.treeStore.getNode(storedLeaf5.index, 0)).toStrictEqual( + leaf.hash().toBigInt() + ); + expect(cache2.treeStore.getNode(storedLeaf14.index, 0)).toStrictEqual( + leaf2.hash().toBigInt() + ); + }); + + it("should preload through multiple levels and insert correctly at right index - harder", async () => { + await cache1.preloadKeys([10n, 100n, 200n, 300n, 400n, 500n]); + + tree1.setLeaf(10n, 10n); + tree1.setLeaf(100n, 100n); + tree1.setLeaf(200n, 200n); + tree1.setLeaf(300n, 300n); + tree1.setLeaf(400n, 400n); + tree1.setLeaf(500n, 500n); + + const cache2 = new SyncCachedLinkedLeafStore(cache1); + await cache2.preloadKeys([14n]); + const tree2 = new LinkedMerkleTree(cache2.treeStore, cache2); + tree2.setLeaf(14n, 14n); + + const leaf = tree1.getLeaf(5n); + const leaf2 = tree2.getLeaf(14n); + + expectDefined(leaf); + expectDefined(leaf2); + + const storedLeaf5 = cache2.getLeaf(5n); + const storedLeaf10 = cache2.getLeaf(10n); + const storedLeaf100 = cache2.getLeaf(100n); + const storedLeaf200 = cache2.getLeaf(200n); + const storedLeaf300 = cache2.getLeaf(300n); + const storedLeaf400 = cache2.getLeaf(400n); + const storedLeaf500 = cache2.getLeaf(500n); + const storedLeaf14 = cache2.getLeaf(14n); + + expectDefined(storedLeaf5); + expectDefined(storedLeaf10); + expectDefined(storedLeaf100); + expectDefined(storedLeaf200); + expectDefined(storedLeaf300); + expectDefined(storedLeaf400); + expectDefined(storedLeaf500); + expectDefined(storedLeaf14); + + expect(storedLeaf5.index).toStrictEqual(1n); + expect(storedLeaf10.index).toStrictEqual(2n); + expect(storedLeaf100.index).toStrictEqual(3n); + 
expect(storedLeaf200.index).toStrictEqual(4n);
+ expect(storedLeaf300.index).toStrictEqual(5n);
+ expect(storedLeaf400.index).toStrictEqual(6n);
+ expect(storedLeaf500.index).toStrictEqual(7n);
+ expect(storedLeaf14.index).toStrictEqual(8n);
+
+ expect(cache1.treeStore.getNode(storedLeaf5.index, 0)).toStrictEqual(
+ leaf.hash().toBigInt()
+ );
+ expect(cache2.treeStore.getNode(storedLeaf14.index, 0)).toStrictEqual(
+ leaf2.hash().toBigInt()
+ );
+ expect(tree1.getRoot()).not.toEqual(tree2.getRoot());
+ await cache2.mergeIntoParent();
+ expect(tree1.getRoot()).toEqual(tree2.getRoot());
+ });
+
+ it("mimic transaction execution service", async () => {
+ expect.assertions(18);
+
+ const treeCache1 = new LinkedMerkleTree(cache1.treeStore, cache1);
+ await cache1.preloadKeys([10n, 20n]);
+ treeCache1.setLeaf(10n, 10n);
+ treeCache1.setLeaf(20n, 20n);
+ await cache1.mergeIntoParent();
+
+ const cache2 = new SyncCachedLinkedLeafStore(cache1);
+ const treeCache2 = new LinkedMerkleTree(cache2.treeStore, cache2);
+ await cache2.preloadKeys([7n]);
+ treeCache2.setLeaf(7n, 7n);
+ await cache2.mergeIntoParent();
+
+ const leaves = await cache1.getLeavesAsync([0n, 5n, 7n, 10n, 20n]);
+ expectDefined(leaves[0]);
+ expectDefined(leaves[1]);
+ expectDefined(leaves[2]);
+ expectDefined(leaves[3]);
+ expectDefined(leaves[4]);
+
+ expect(leaves[0]?.leaf).toEqual({
+ value: 0n,
+ path: 0n,
+ nextPath: 5n,
+ });
+ expect(leaves[1]?.leaf).toEqual({
+ value: 10n,
+ path: 5n,
+ nextPath: 7n,
+ });
+ expect(leaves[2]?.leaf).toEqual({
+ value: 7n,
+ path: 7n,
+ nextPath: 10n,
+ });
+ expect(leaves[3]?.leaf).toEqual({
+ value: 10n,
+ path: 10n,
+ nextPath: 20n,
+ });
+ expect(leaves[4]?.leaf).toEqual({
+ value: 20n,
+ path: 20n,
+ nextPath: Field.ORDER - 1n,
+ });
+
+ const storedLeaf5 = cache1.getLeaf(5n);
+ const storedLeaf7 = cache1.getLeaf(7n);
+ const storedLeaf10 = cache1.getLeaf(10n);
+ const storedLeaf20 = cache1.getLeaf(20n);
+
+ expectDefined(storedLeaf5);
+ await expect(
+ 
cache1.treeStore.getNodesAsync([{ key: storedLeaf5.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([Field(10), Field(5), Field(7)]).toBigInt(), + ]); + expectDefined(storedLeaf7); + await expect( + cache1.treeStore.getNodesAsync([{ key: storedLeaf7.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([Field(7), Field(7), Field(10)]).toBigInt(), + ]); + expectDefined(storedLeaf10); + await expect( + cache1.treeStore.getNodesAsync([{ key: storedLeaf10.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([Field(10), Field(10), Field(20)]).toBigInt(), + ]); + expectDefined(storedLeaf20); + await expect( + cache1.treeStore.getNodesAsync([{ key: storedLeaf20.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([Field(20), Field(20), Field(Field.ORDER - 1n)]).toBigInt(), + ]); + }); + + it("should cache correctly", async () => { + expect.assertions(15); + + const cache2 = new SyncCachedLinkedLeafStore(cache1); + const tree2 = new LinkedMerkleTree(cache2.treeStore, cache2); + + await cache2.preloadKeys([5n]); + const leaf1 = tree2.getLeaf(5n); + const storedLeaf1 = cache2.getLeaf(5n); + expectDefined(leaf1); + expectDefined(storedLeaf1); + await expect( + mainStore.treeStore.getNodesAsync([{ key: storedLeaf1.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([leaf1.value, leaf1.path, leaf1.nextPath]).toBigInt(), + ]); + + tree1.setLeaf(10n, 20n); + + const leaf2 = tree2.getLeaf(10n); + const storedLeaf2 = cache2.getLeaf(10n); + expectDefined(leaf2); + expectDefined(storedLeaf2); + expect(tree2.tree.getNode(0, storedLeaf2.index).toBigInt()).toBe( + Poseidon.hash([leaf2.value, leaf2.path, leaf2.nextPath]).toBigInt() + ); + + const witness = tree2.getReadWitness(5n); + + // We check tree1 and tree2 have same hash roots. + // The witness is from tree2, which comes from cache2, + // but which because of the sync is really just cache1. 
+ expect(
+ witness.merkleWitness
+ .calculateRoot(
+ Poseidon.hash([
+ witness.leaf.value,
+ witness.leaf.path,
+ witness.leaf.nextPath,
+ ])
+ )
+ .toString()
+ ).toBe(tree1.getRoot().toString());
+
+ expect(
+ witness.merkleWitness
+ .calculateRoot(Poseidon.hash([Field(11), Field(5n), Field(10n)]))
+ .toString()
+ ).not.toBe(tree1.getRoot().toString());
+
+ const witness2 = tree1.getReadWitness(10n);
+
+ expect(
+ witness2.merkleWitness
+ .calculateRoot(
+ Poseidon.hash([
+ Field(20),
+ Field(10n),
+ witness2.leaf.nextPath, // This is the maximum as the leaf 10n should be the last
+ ])
+ )
+ .toString()
+ ).toBe(tree2.getRoot().toString());
+
+ tree2.setLeaf(15n, 30n);
+
+ // Won't be the same as tree2 works on cache2 and these changes don't
+ // carry up to cache1. Have to merge into parent for this.
+ expect(tree1.getRoot().toString()).not.toBe(tree2.getRoot().toString());
+
+ // After this the changes should be merged into the parents, i.e. cache1,
+ // which tree1 has access to.
+ await cache2.mergeIntoParent();
+
+ const storedLeaf15 = cache2.getLeaf(15n);
+ const leaf15 = tree2.getLeaf(15n);
+ expectDefined(leaf15);
+ expectDefined(storedLeaf15);
+ expect(tree1.getRoot().toString()).toBe(tree2.getRoot().toString());
+ expect(tree1.tree.getNode(0, storedLeaf15.index).toString()).toBe(
+ Poseidon.hash([leaf15.value, leaf15.path, leaf15.nextPath]).toString()
+ );
+
+ // Now the mainstore has the new 15n root. 
+ await cache1.mergeIntoParent();
+
+ const cachedStore = await CachedLinkedLeafStore.new(mainStore);
+ await cachedStore.preloadKey(15n);
+
+ expect(
+ new LinkedMerkleTree(cachedStore.treeStore, cachedStore)
+ .getRoot()
+ .toString()
+ ).toBe(tree2.getRoot().toString());
+ });
+
+ it("mimic transaction execution service further", async () => {
+ expect.assertions(16);
+
+ const mStore = new InMemoryAsyncLinkedLeafStore();
+ const mCache = await CachedLinkedLeafStore.new(mStore);
+ const mCache2 = new SyncCachedLinkedLeafStore(mCache);
+ const treeCache1 = new LinkedMerkleTree(mCache.treeStore, mCache);
+ const treeCache2 = new LinkedMerkleTree(mCache2.treeStore, mCache2);
+
+ await mCache.preloadKeys([5n]);
+ treeCache1.setLeaf(10n, 10n);
+ treeCache1.setLeaf(20n, 20n);
+
+ await mCache2.preloadKeys([7n]);
+ treeCache2.setLeaf(7n, 7n);
+ await mCache2.mergeIntoParent();
+
+ const leaves = await mCache.getLeavesAsync([0n, 7n, 10n, 20n]);
+ expectDefined(leaves[0]);
+ expectDefined(leaves[1]);
+ expectDefined(leaves[2]);
+ expectDefined(leaves[3]);
+
+ expect(leaves[0]?.leaf).toEqual({
+ value: 0n,
+ path: 0n,
+ nextPath: 7n,
+ });
+ expect(leaves[1]?.leaf).toEqual({
+ value: 7n,
+ path: 7n,
+ nextPath: 10n,
+ });
+ expect(leaves[2]?.leaf).toEqual({
+ value: 10n,
+ path: 10n,
+ nextPath: 20n,
+ });
+ expect(leaves[3]?.leaf).toEqual({
+ value: 20n,
+ path: 20n,
+ nextPath: Field.ORDER - 1n,
+ });
+
+ const storedLeaf0 = mCache.getLeaf(0n);
+ const storedLeaf7 = mCache.getLeaf(7n);
+ const storedLeaf10 = mCache.getLeaf(10n);
+ const storedLeaf20 = mCache.getLeaf(20n);
+
+ expectDefined(storedLeaf0);
+ await expect(
+ mCache.treeStore.getNodesAsync([{ key: storedLeaf0.index, level: 0 }])
+ ).resolves.toStrictEqual([
+ Poseidon.hash([Field(0), Field(0), Field(7)]).toBigInt(),
+ ]);
+ expectDefined(storedLeaf7);
+ await expect(
+ mCache.treeStore.getNodesAsync([{ key: storedLeaf7.index, level: 0 }])
+ ).resolves.toStrictEqual([
+ Poseidon.hash([Field(7), Field(7), 
Field(10)]).toBigInt(), + ]); + expectDefined(storedLeaf10); + await expect( + mCache.treeStore.getNodesAsync([{ key: storedLeaf10.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([Field(10), Field(10), Field(20)]).toBigInt(), + ]); + expectDefined(storedLeaf20); + await expect( + mCache.treeStore.getNodesAsync([{ key: storedLeaf20.index, level: 0 }]) + ).resolves.toStrictEqual([ + Poseidon.hash([Field(20), Field(20), Field(Field.ORDER - 1n)]).toBigInt(), + ]); + }); + + it("mimic block production test && ST Prover", async () => { + // main store already has 0n and 5n paths defined. + // preloading 10n should load up 5n in the cache1 leaf and node stores. + await cache1.preloadKeys([10n, 10n, 10n]); + const state = tree1.getRoot(); + + // This is an insert as 10n is not already in the tree. + const witness1 = tree1.setLeaf(10n, 10n); + // This checks the right previous leaf was found. + expect( + witness1.leafPrevious.merkleWitness + .checkMembershipSimple( + state, + new LinkedLeafStruct({ + value: Field(witness1.leafPrevious.leaf.value), + path: Field(witness1.leafPrevious.leaf.path), + nextPath: Field(witness1.leafPrevious.leaf.nextPath), + }).hash() + ) + .toBoolean() + ).toStrictEqual(true); + + // We now look to the state after the prevLeaf is changed. + // The prev leaf should be the 5n. + const rootAfterFirstChange = + witness1.leafPrevious.merkleWitness.calculateRoot( + new LinkedLeafStruct({ + value: Field(witness1.leafPrevious.leaf.value), + path: Field(witness1.leafPrevious.leaf.path), + nextPath: Field(10n), + }).hash() + ); + expect( + witness1.leafCurrent.merkleWitness.calculateRoot(Field(0)).toBigInt() + ).toStrictEqual(rootAfterFirstChange.toBigInt()); + + // We now check that right hashing was done to get to the current root. 
+ expect( + witness1.leafCurrent.merkleWitness + .checkMembershipSimple( + tree1.getRoot(), + new LinkedLeafStruct({ + value: Field(10n), + path: Field(10n), + nextPath: Field(witness1.leafPrevious.leaf.nextPath), + }).hash() + ) + .toBoolean() + ).toStrictEqual(true); + + // Now we update the node at 10n, + const witness2 = tree1.setLeaf(10n, 8n); + // We now check that right hashing was done to get to the current root. + expect( + witness2.leafCurrent.merkleWitness.calculateRoot( + new LinkedLeafStruct({ + value: Field(8n), + path: Field(10n), + nextPath: Field(witness2.leafCurrent.leaf.nextPath), + }).hash() + ) + ).toStrictEqual(tree1.getRoot()); + + // Now we update the node at 10n, again, + const witness3 = tree1.setLeaf(10n, 4n); + // We now check that right hashing was done to get to the current root. + expect( + witness3.leafCurrent.merkleWitness.calculateRoot( + new LinkedLeafStruct({ + value: Field(4n), + path: Field(10n), + nextPath: Field(witness2.leafCurrent.leaf.nextPath), + }).hash() + ) + ).toStrictEqual(tree1.getRoot()); + }); +}); diff --git a/packages/sequencer/test/merkle/CachedMerkleStore.test.ts b/packages/sequencer/test/merkle/CachedMerkleStore.test.ts index 4d882d7db..1b6cbf378 100644 --- a/packages/sequencer/test/merkle/CachedMerkleStore.test.ts +++ b/packages/sequencer/test/merkle/CachedMerkleStore.test.ts @@ -25,7 +25,7 @@ describe("cached merkle store", () => { tree1 = new RollupMerkleTree(cache1); }); - it("should cache multiple keys corretly", async () => { + it("should cache multiple keys correctly", async () => { expect.assertions(3); const cache2 = new CachedMerkleTreeStore(cache1); diff --git a/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts b/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts index e82d845d3..6c877194c 100644 --- a/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts +++ 
b/packages/sequencer/test/production/tracing/StateTransitionTracingService.test.ts @@ -9,18 +9,18 @@ import { WitnessedRootHashList, } from "@proto-kit/protocol"; import { Bool, Field } from "o1js"; -import { mapSequential, RollupMerkleTree } from "@proto-kit/common"; +import { LinkedMerkleTree, mapSequential } from "@proto-kit/common"; import { toStateTransitionsHash } from "@proto-kit/module"; import { - CachedMerkleTreeStore, - InMemoryAsyncMerkleTreeStore, UntypedStateTransition, StateTransitionTracingService, TracingStateTransitionBatch, StateTransitionProofParameters, ConsoleTracer, + CachedLinkedLeafStore, } from "../../../src"; +import { InMemoryAsyncLinkedLeafStore } from "../../../src/storage/inmemory/InMemoryAsyncLinkedLeafStore"; function createST(obj: { path: string; @@ -52,19 +52,19 @@ function createSTSimple( async function applyBatchesToTree( batches: TracingStateTransitionBatch[], - cached: CachedMerkleTreeStore + cached: CachedLinkedLeafStore ) { const sts = batches .filter((x) => x.applied) .flatMap(({ stateTransitions }) => stateTransitions); - const tree = new RollupMerkleTree(cached); + const tree = new LinkedMerkleTree(cached.treeStore, cached); await mapSequential(sts, async (st) => { await cached.preloadKey(st.path.toBigInt()); if (st.to.isSome.toBoolean()) { - tree.setLeaf(st.path.toBigInt(), st.to.treeValue); + tree.setLeaf(st.path.toBigInt(), st.to.treeValue.toBigInt()); } }); @@ -162,12 +162,13 @@ describe("StateTransitionTracingService", () => { }); describe.each(cases)("tracing two chunks of STs", ({ batch, numSTs }) => { - const store = new InMemoryAsyncMerkleTreeStore(); - const cached = new CachedMerkleTreeStore(store); + const store = new InMemoryAsyncLinkedLeafStore(); let trace: StateTransitionProofParameters[]; beforeAll(async () => { + const cached = await CachedLinkedLeafStore.new(store); + trace = await service.createMerkleTrace(cached, batch); }); @@ -180,7 +181,7 @@ describe("StateTransitionTracingService", () => { 
it("should set second publicInput correctly", async () => { const tree = await applyBatchesToTree( batch.slice(0, 4), - new CachedMerkleTreeStore(store) + await CachedLinkedLeafStore.new(store) ); expect(trace[1].publicInput.root.toString()).toStrictEqual( @@ -215,7 +216,7 @@ describe("StateTransitionTracingService", () => { const witnessedRootsList = new WitnessedRootHashList(); const tempTree = await applyBatchesToTree( batch.slice(0, 2), - new CachedMerkleTreeStore(store) + await CachedLinkedLeafStore.new(store) ); witnessedRootsList.push({ @@ -234,12 +235,12 @@ describe("StateTransitionTracingService", () => { }); describe("tracing two separate sequences", () => { - const store = new InMemoryAsyncMerkleTreeStore(); - const cached = new CachedMerkleTreeStore(store); + const store = new InMemoryAsyncLinkedLeafStore(); + let cached: CachedLinkedLeafStore; let trace1: StateTransitionProofParameters[]; let trace2: StateTransitionProofParameters[]; - let tree1: RollupMerkleTree; + let tree1: LinkedMerkleTree; const batches: TracingStateTransitionBatch[][] = [ [ @@ -264,9 +265,10 @@ describe("StateTransitionTracingService", () => { ]; beforeAll(async () => { + cached = await CachedLinkedLeafStore.new(store); trace1 = await service.createMerkleTrace(cached, batches[0]); - const cached2 = new CachedMerkleTreeStore(store); + const cached2 = await CachedLinkedLeafStore.new(store); tree1 = await applyBatchesToTree(batches[0], cached2); trace2 = await service.createMerkleTrace(cached, batches[1]); @@ -295,8 +297,8 @@ describe("StateTransitionTracingService", () => { }); describe("should trace correctly", () => { - const store = new InMemoryAsyncMerkleTreeStore(); - const cached = new CachedMerkleTreeStore(store); + const store = new InMemoryAsyncLinkedLeafStore(); + let cached: CachedLinkedLeafStore; const batches: TracingStateTransitionBatch[] = [ { @@ -319,6 +321,7 @@ describe("StateTransitionTracingService", () => { let trace: StateTransitionProofParameters[]; 
beforeAll(async () => { + cached = await CachedLinkedLeafStore.new(store); trace = await service.createMerkleTrace(cached, batches); }); @@ -348,15 +351,15 @@ describe("StateTransitionTracingService", () => { expect(result).toBeDefined(); // Check that root matches - const tree = new RollupMerkleTree(cached); + const tree = new LinkedMerkleTree(cached.treeStore, cached); expect(result.root.toString()).toStrictEqual(tree.getRoot().toString()); }); }); it("check that STs have been applied to the tree store", async () => { - const tracedTree = new RollupMerkleTree(cached); + const tracedTree = new LinkedMerkleTree(cached.treeStore, cached); - const cached2 = new CachedMerkleTreeStore(store); + const cached2 = await CachedLinkedLeafStore.new(store); const tree = await applyBatchesToTree(batches, cached2); expect(tracedTree.getRoot().toString()).toStrictEqual( diff --git a/packages/sequencer/test/settlement/Settlement.ts b/packages/sequencer/test/settlement/Settlement.ts index 4fcda0d93..b6aa1fd22 100644 --- a/packages/sequencer/test/settlement/Settlement.ts +++ b/packages/sequencer/test/settlement/Settlement.ts @@ -2,7 +2,7 @@ import { expectDefined, mapSequential, TypedClass, - RollupMerkleTree, + LinkedMerkleTree, } from "@proto-kit/common"; import { VanillaProtocolModules } from "@proto-kit/library"; import { Runtime } from "@proto-kit/module"; @@ -487,8 +487,8 @@ export const settlementTestFn = ( const input = BlockProverPublicInput.fromFields( batch!.proof.publicInput.map((x) => Field(x)) ); - expect(input.stateRoot.toBigInt()).toStrictEqual( - RollupMerkleTree.EMPTY_ROOT + expect(input.stateRoot.toString()).toStrictEqual( + LinkedMerkleTree.EMPTY_ROOT.toString() ); const lastBlock = await blockQueue.getLatestBlockAndResult(); @@ -503,14 +503,14 @@ export const settlementTestFn = ( const { settlement } = settlementModule.getContracts(); expectDefined(lastBlock); expectDefined(lastBlock.result); - expect(settlement.networkStateHash.get().toBigInt()).toStrictEqual( 
- lastBlock!.result.afterNetworkState.hash().toBigInt() + expect(settlement.networkStateHash.get().toString()).toStrictEqual( + lastBlock!.result.afterNetworkState.hash().toString() ); - expect(settlement.stateRoot.get().toBigInt()).toStrictEqual( - lastBlock!.result.stateRoot + expect(settlement.stateRoot.get().toString()).toStrictEqual( + lastBlock!.result.stateRoot.toString() ); - expect(settlement.blockHashRoot.get().toBigInt()).toStrictEqual( - lastBlock!.result.blockHashRoot + expect(settlement.blockHashRoot.get().toString()).toStrictEqual( + lastBlock!.result.blockHashRoot.toString() ); } catch (e) { console.error(e); @@ -591,7 +591,7 @@ export const settlementTestFn = ( .sub(contractBalanceBefore); expect(actions).toHaveLength(1); - expect(balanceDiff.toBigInt()).toBe(depositAmount); + expect(balanceDiff.toString()).toBe(depositAmount.toString()); const [, batch] = await createBatch(false); @@ -632,7 +632,9 @@ export const settlementTestFn = ( const l2balanceDiff = balance.sub( userL2BalanceBefore ?? UInt64.from(0) ); - expect(l2balanceDiff.toBigInt()).toStrictEqual(depositAmount); + expect(l2balanceDiff.toString()).toStrictEqual( + depositAmount.toString() + ); } catch (e) { console.error(e); throw e; @@ -696,7 +698,9 @@ export const settlementTestFn = ( bridgingContract.deriveTokenId() ); - expect(account.balance.toBigInt()).toStrictEqual(BigInt(withdrawAmount)); + expect(account.balance.toString()).toStrictEqual( + withdrawAmount.toString() + ); }, timeout * 2 ); @@ -770,8 +774,8 @@ export const settlementTestFn = ( // tx fee const minaFees = BigInt(fee); - expect(balanceAfter - balanceBefore).toBe( - amount - (tokenConfig === undefined ? minaFees : 0n) + expect((balanceAfter - balanceBefore).toString()).toBe( + (amount - (tokenConfig === undefined ? 
minaFees : 0n)).toString() ); }, timeout diff --git a/packages/stack/src/scripts/graphql/server.ts b/packages/stack/src/scripts/graphql/server.ts index 3a7d50f21..1126077b4 100644 --- a/packages/stack/src/scripts/graphql/server.ts +++ b/packages/stack/src/scripts/graphql/server.ts @@ -36,7 +36,7 @@ import { GraphqlSequencerModule, GraphqlServer, MempoolResolver, - MerkleWitnessResolver, + LinkedMerkleWitnessResolver as MerkleWitnessResolver, NodeStatusResolver, QueryGraphqlModule, BlockResolver, diff --git a/packages/stack/test/graphql/graphql.test.ts b/packages/stack/test/graphql/graphql.test.ts index 007c045d3..04a84287e 100644 --- a/packages/stack/test/graphql/graphql.test.ts +++ b/packages/stack/test/graphql/graphql.test.ts @@ -170,8 +170,8 @@ describe("graphql client test", () => { expect(witness).toBeDefined(); // Check if this works, i.e. if it correctly parsed - expect(witness!.calculateRoot(Field(0)).toBigInt()).toBeGreaterThanOrEqual( - 0n - ); + expect( + witness!.merkleWitness.calculateRoot(Field(0)).toBigInt() + ).toBeGreaterThanOrEqual(0n); }); });