Skip to content

Commit

Permalink
feat: areCausallyRelated optimisation using BitSets (#133)
Browse files Browse the repository at this point in the history
  • Loading branch information
JanLewDev authored Sep 9, 2024
1 parent d4fc8c4 commit ae93d09
Show file tree
Hide file tree
Showing 6 changed files with 251 additions and 8 deletions.
3 changes: 2 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,8 @@
"docs": "typedoc",
"proto-gen": "buf generate",
"release": "release-it",
"test": "vitest"
"test": "vitest",
"bench": "vitest bench"
},
"devDependencies": {
"@biomejs/biome": "^1.8.3",
Expand Down
84 changes: 84 additions & 0 deletions packages/object/src/hashgraph/bitset.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
/*
BitSet is associated with each vertex and is used to store the indices of the vertices that are reachable.
In other words, all the vertices causally before in the hashgraph.
When processing in the topologically ordered manner, we set the BitSet of the vertex to the bitwise OR of the BitSet of its dependencies.
Then, to check if two vertices are causally related, we check if the BitSet of the first vertex contains the index of the second vertex and vice-versa.
Algorithm for more optimal causality check inspired by https://stackoverflow.com/a/78133041
*/
export class BitSet {
	// Backing storage: each Uint32Array element holds 32 bits of the set.
	private data: Uint32Array;

	/**
	 * @param size Number of 32-bit words to allocate; the set can hold
	 *             `size * 32` bits. Defaults to one word (32 bits).
	 */
	constructor(size = 1) {
		this.data = new Uint32Array(size);
	}

	/** Reset every bit to 0 without changing the capacity. */
	clear(): void {
		this.data = new Uint32Array(this.data.length);
	}

	/**
	 * Set (`value === true`) or clear (`value === false`) the bit at `index`.
	 * An out-of-range index is silently ignored (typed-array write semantics).
	 */
	set(index: number, value: boolean): void {
		// (index / 32) | 0 is equivalent to Math.floor(index / 32)
		const wordIndex = (index / 32) | 0;
		const bitIndex = index % 32;
		// if value is false, AND with all 1s except the bit at bitIndex
		if (value) this.data[wordIndex] |= 1 << bitIndex;
		else this.data[wordIndex] &= ~(1 << bitIndex);
	}

	/** @returns true iff the bit at `index` is set; false for out-of-range indices. */
	get(index: number): boolean {
		// (index / 32) | 0 is equivalent to Math.floor(index / 32)
		const wordIndex = (index / 32) | 0;
		const bitIndex = index % 32;
		return (this.data[wordIndex] & (1 << bitIndex)) !== 0;
	}

	/** Toggle the bit at `index`. */
	flip(index: number): void {
		// (index / 32) | 0 is equivalent to Math.floor(index / 32)
		const wordIndex = (index / 32) | 0;
		const bitIndex = index % 32;
		this.data[wordIndex] ^= 1 << bitIndex;
	}

	/** Bitwise AND of two bitsets of the same size; returns a new BitSet. */
	and(other: BitSet): BitSet {
		const result = new BitSet(this.data.length);
		for (let i = 0; i < this.data.length; i++) {
			result.data[i] = this.data[i] & other.data[i];
		}
		return result;
	}

	/** Bitwise OR of two bitsets of the same size; returns a new BitSet. */
	or(other: BitSet): BitSet {
		const result = new BitSet(this.data.length);
		for (let i = 0; i < this.data.length; i++) {
			result.data[i] = this.data[i] | other.data[i];
		}
		return result;
	}

	/** Bitwise XOR of two bitsets of the same size; returns a new BitSet. */
	xor(other: BitSet): BitSet {
		const result = new BitSet(this.data.length);
		for (let i = 0; i < this.data.length; i++) {
			result.data[i] = this.data[i] ^ other.data[i];
		}
		return result;
	}

	/** Bitwise NOT; returns a new BitSet of the same size with every bit inverted. */
	not(): BitSet {
		// Bug fix: the constructor takes a word count, not a bit count, so pass
		// this.data.length (previously `* 32`, which made the result 32x larger
		// than the operand and inconsistent with and/or/xor).
		const result = new BitSet(this.data.length);
		for (let i = 0; i < this.data.length; i++) {
			result.data[i] = ~this.data[i];
		}
		return result;
	}

	/** Binary string rendering, most-significant word first, 32 chars per word. */
	toString(): string {
		return Array.from(this.data)
			.reverse()
			.map((int) => int.toString(2).padStart(32, "0"))
			.join("");
	}
}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import * as crypto from "node:crypto";
import { BitSet } from "./bitset.js";

type Hash = string;
export type Hash = string;
export type Operation<T> = { type: string; value: T | null };

enum OperationType {
Expand Down Expand Up @@ -35,6 +36,11 @@ export class HashGraph<T> {
{ type: OperationType.NOP, value: null },
[],
);
private arePredecessorsFresh = false;
private reachablePredecessors: Map<Hash, BitSet> = new Map();
private topoSortedIndex: Map<Hash, number> = new Map();
// We start with a bitset of size 1, and double it every time we reach the limit
private currentBitsetSize = 1;

constructor(
nodeId: string,
Expand Down Expand Up @@ -81,6 +87,7 @@ export class HashGraph<T> {

const depsSet = new Set(deps);
this.frontier = this.frontier.filter((hash) => !depsSet.has(hash));
this.arePredecessorsFresh = false;
return hash;
}

Expand Down Expand Up @@ -119,14 +126,16 @@ export class HashGraph<T> {

const depsSet = new Set(deps);
this.frontier = this.frontier.filter((hash) => !depsSet.has(hash));

this.arePredecessorsFresh = false;
return hash;
}

// Time complexity: O(V + E), Space complexity: O(V)
topologicalSort(): Hash[] {
const result: Hash[] = [];
const visited = new Set<Hash>();
this.reachablePredecessors.clear();
this.topoSortedIndex.clear();

const visit = (hash: Hash) => {
if (visited.has(hash)) return;
Expand All @@ -141,8 +150,32 @@ export class HashGraph<T> {
};
// Start with the root vertex
visit(HashGraph.rootHash);
result.reverse();

// Double the size until it's enough to hold all the vertices
while (this.currentBitsetSize < result.length) this.currentBitsetSize *= 2;

for (let i = 0; i < result.length; i++) {
this.topoSortedIndex.set(result[i], i);
this.reachablePredecessors.set(
result[i],
new BitSet(this.currentBitsetSize),
);
for (const dep of this.vertices.get(result[i])?.dependencies || []) {
const depReachable = this.reachablePredecessors.get(dep);
depReachable?.set(this.topoSortedIndex.get(dep) || 0, true);
if (depReachable) {
const reachable = this.reachablePredecessors.get(result[i]);
this.reachablePredecessors.set(
result[i],
reachable?.or(depReachable) || depReachable,
);
}
}
}

return result.reverse();
this.arePredecessorsFresh = true;
return result;
}

linearizeOperations(): Operation<T>[] {
Expand All @@ -158,7 +191,7 @@ export class HashGraph<T> {
while (j < order.length) {
const moving = order[j];

if (!this.areCausallyRelated(anchor, moving)) {
if (!this.areCausallyRelatedUsingBitsets(anchor, moving)) {
const v1 = this.vertices.get(anchor);
const v2 = this.vertices.get(moving);
let action: ActionType;
Expand Down Expand Up @@ -200,8 +233,24 @@ export class HashGraph<T> {
return result;
}

// Amortised time complexity: O(1), Amortised space complexity: O(1)
// Lazily rebuilds the reachability bitsets (via topologicalSort) when the
// graph changed since the last sort, then answers with two O(1) bit lookups.
areCausallyRelatedUsingBitsets(hash1: Hash, hash2: Hash): boolean {
	if (!this.arePredecessorsFresh) {
		this.topologicalSort();
	}
	const index1 = this.topoSortedIndex.get(hash1);
	const index2 = this.topoSortedIndex.get(hash2);
	// A hash the sort never saw cannot be related to anything. The previous
	// `|| 0` fallback aliased an unknown hash to index 0 (the root), which
	// could report a spurious causal relation.
	if (index1 === undefined || index2 === undefined) return false;
	// Related iff hash2 is a predecessor of hash1, or vice versa.
	return (
		(this.reachablePredecessors.get(hash1)?.get(index2) ?? false) ||
		(this.reachablePredecessors.get(hash2)?.get(index1) ?? false)
	);
}

// Time complexity: O(V), Space complexity: O(V)
areCausallyRelated(hash1: Hash, hash2: Hash): boolean {
areCausallyRelatedUsingBFS(hash1: Hash, hash2: Hash): boolean {
const visited = new Set<Hash>();
const stack = [hash1];

Expand Down
4 changes: 2 additions & 2 deletions packages/object/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@ import {
HashGraph,
type Operation,
type Vertex,
} from "./hashgraph.js";
} from "./hashgraph/index.js";
import type { TopologyObjectBase } from "./proto/object_pb.js";
import { compileWasm } from "./wasm/compiler.js";

export * from "./proto/object_pb.js";
export * from "./hashgraph.js";
export * from "./hashgraph/index.js";

export interface CRO<T> {
resolveConflicts: (vertices: Vertex<T>[]) => ActionType;
Expand Down
38 changes: 38 additions & 0 deletions packages/object/tests/bitset.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import { beforeEach, describe, expect, test } from "vitest";
import { BitSet } from "../src/hashgraph/bitset.js";

describe("BitSet Test", () => {
	let bitset: BitSet;

	beforeEach(() => {
		// Two 32-bit words -> capacity for 64 bits.
		bitset = new BitSet(2);
	});

	test("Test: BitSet", () => {
		// Set one bit in each word.
		for (const index of [0, 50]) bitset.set(index, true);

		expect(bitset.get(0)).toBe(true);
		expect(bitset.get(49)).toBe(false);
		expect(bitset.get(50)).toBe(true);

		// Toggling inverts both a clear and a set bit.
		for (const index of [49, 50]) bitset.flip(index);
		expect(bitset.get(49)).toBe(true);
		expect(bitset.get(50)).toBe(false);

		bitset.clear();

		// OR with an all-zero set keeps the operand's bits.
		let other = new BitSet(2);
		other.set(0, true);
		other = other.or(bitset);
		expect(other.get(0)).toBe(true);

		other.set(0, false);
		expect(other.get(0)).toBe(false);

		// AND with an all-zero set clears everything.
		other = other.and(bitset);
		expect(other.get(0)).toBe(false);
	});
});
71 changes: 71 additions & 0 deletions packages/object/tests/causallyrelated.bench.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import test from "node:test";
import { beforeEach, bench, describe } from "vitest";
import { AddWinsSet } from "../../crdt/src/cros/AddWinsSet/index.js";
import {
type Hash,
type TopologyObject,
merge,
newTopologyObject,
} from "../src/index.js";

describe("AreCausallyDependent benchmark", async () => {
	// Number of random (hash, hash) pairs each benchmark checks per iteration.
	const samples = 100000;
	const tests: Hash[][] = [];

	// Build three replicas with interleaved operations and merges so the
	// resulting hashgraph contains both causally related and concurrent pairs.
	const obj1 = await newTopologyObject("peer1", new AddWinsSet<number>());
	const obj2 = await newTopologyObject("peer2", new AddWinsSet<number>());
	const obj3 = await newTopologyObject("peer3", new AddWinsSet<number>());

	const cro1 = obj1.cro as AddWinsSet<number>;
	const cro2 = obj2.cro as AddWinsSet<number>;
	const cro3 = obj3.cro as AddWinsSet<number>;

	cro1.add(1);
	merge(obj2, obj1.hashGraph.getAllVertices());

	cro1.add(1);
	cro1.remove(2);
	cro2.remove(2);
	cro2.add(2);

	merge(obj3, obj1.hashGraph.getAllVertices());
	cro3.add(3);
	cro1.remove(1);

	merge(obj1, obj2.hashGraph.getAllVertices());
	cro1.remove(3);
	cro2.remove(1);

	merge(obj1, obj2.hashGraph.getAllVertices());
	merge(obj1, obj3.hashGraph.getAllVertices());

	// Pre-draw the random pairs so both benchmarks query identical inputs.
	const vertices = obj1.hashGraph.getAllVertices();
	for (let i = 0; i < samples; i++) {
		tests.push([
			vertices[Math.floor(Math.random() * vertices.length)].hash,
			vertices[Math.floor(Math.random() * vertices.length)].hash,
		]);
	}

	// Dropped the unused shadowing `const cro1` / `const result` locals and the
	// pointless `async` on callbacks that never await.
	bench("Causality check using BFS", () => {
		for (let i = 0; i < samples; i++) {
			obj1.hashGraph.areCausallyRelatedUsingBFS(tests[i][0], tests[i][1]);
		}
	});

	bench("Causality check using Bitsets", () => {
		for (let i = 0; i < samples; i++) {
			obj1.hashGraph.areCausallyRelatedUsingBitsets(tests[i][0], tests[i][1]);
		}
	});
});

0 comments on commit ae93d09

Please sign in to comment.