From 000678d57241c9cf436ce7d2414633fab43c5683 Mon Sep 17 00:00:00 2001
From: HerbCaudill
Date: Mon, 30 Oct 2023 15:45:30 +0100
Subject: [PATCH 1/6] storage: documentation & minor refactoring

---
 packages/automerge-repo/src/index.ts          |  12 +-
 .../src/storage/StorageAdapter.ts             |  53 ++---
 .../src/storage/StorageSubsystem.ts           | 202 +++++++++---------
 .../src/storage/chunkTypeFromKey.ts           |  22 ++
 .../automerge-repo/src/storage/keyHash.ts     |  17 ++
 packages/automerge-repo/src/storage/types.ts  |  39 ++++
 6 files changed, 216 insertions(+), 129 deletions(-)
 create mode 100644 packages/automerge-repo/src/storage/chunkTypeFromKey.ts
 create mode 100644 packages/automerge-repo/src/storage/keyHash.ts
 create mode 100644 packages/automerge-repo/src/storage/types.ts

diff --git a/packages/automerge-repo/src/index.ts b/packages/automerge-repo/src/index.ts
index b66f57655..a35e65c81 100644
--- a/packages/automerge-repo/src/index.ts
+++ b/packages/automerge-repo/src/index.ts
@@ -52,6 +52,7 @@ export type {
   DocHandleOutboundEphemeralMessagePayload,
   HandleState,
 } from "./DocHandle.js"
+
 export type {
   DeleteDocumentPayload,
   DocumentPayload,
@@ -59,12 +60,14 @@ export type {
   RepoEvents,
   SharePolicy,
 } from "./Repo.js"
+
 export type {
   NetworkAdapterEvents,
   OpenPayload,
   PeerCandidatePayload,
   PeerDisconnectedPayload,
 } from "./network/NetworkAdapter.js"
+
 export type {
   DocumentUnavailableMessage,
   EphemeralMessage,
@@ -73,5 +76,12 @@ export type {
   RequestMessage,
   SyncMessage,
 } from "./network/messages.js"
-export type { StorageKey } from "./storage/StorageAdapter.js"
+
+export type {
+  Chunk,
+  ChunkInfo,
+  ChunkType,
+  StorageKey,
+} from "./storage/types.js"
+
 export * from "./types.js"

diff --git a/packages/automerge-repo/src/storage/StorageAdapter.ts b/packages/automerge-repo/src/storage/StorageAdapter.ts
index 36671144d..60e846cdd 100644
--- a/packages/automerge-repo/src/storage/StorageAdapter.ts
+++ b/packages/automerge-repo/src/storage/StorageAdapter.ts
@@ -1,41 +1,34 @@
+import { StorageKey, Chunk } from "./types.js"
+
 /** A storage adapter represents some way of storing binary data for a {@link Repo}
  *
  * @remarks
- * `StorageAdapter`s are a little like a key/value store. The keys are arrays
- * of strings ({@link StorageKey}) and the values are binary blobs.
+ * `StorageAdapter`s provide a key/value storage interface. The keys are arrays of strings
+ * ({@link StorageKey}) and the values are binary blobs.
  */
 export abstract class StorageAdapter {
-  // load, store, or remove a single binary blob based on an array key
-  // automerge-repo mostly uses keys in the following form:
-  //   [documentId, "snapshot"] or [documentId, "incremental", "0"]
-  // but the storage adapter is agnostic to the meaning of the key
-  // and we expect to store other data in the future such as syncstates
-  /** Load the single blob correspongind to `key` */
+  /** Load the single value corresponding to `key` */
   abstract load(key: StorageKey): Promise<Uint8Array | undefined>
-  /** save the blod `data` to the key `key` */
+
+  /** Save the value `data` to the key `key` */
   abstract save(key: StorageKey, data: Uint8Array): Promise<void>
-  /** remove the blob corresponding to `key` */
+
+  /** Remove the value corresponding to `key` */
   abstract remove(key: StorageKey): Promise<void>

-  // the keyprefix will match any key that starts with the given array
-  // for example, [documentId, "incremental"] will match all incremental saves
-  // or [documentId] will match all data for a given document
-  // be careful! this will also match [documentId, "syncState"]!
-  // (we aren't using this yet but keep it in mind.)
-  /** Load all blobs with keys that start with `keyPrefix` */
-  abstract loadRange(keyPrefix: StorageKey): Promise<{key: StorageKey, data: Uint8Array}[]>
-  /** Remove all blobs with keys that start with `keyPrefix` */
+  /**
+   * Load all values with keys that start with `keyPrefix`.
+   *
+   * @remarks
+   * The `keyPrefix` will match any key that starts with the given array. For example:
+   * - `[documentId, "incremental"]` will match all incremental saves
+   * - `[documentId]` will match all data for a given document.
+   *
+   * Be careful! `[documentId]` would also match something like `[documentId, "syncState"]`! (We
+   * aren't using this yet but keep it in mind.)
+   */
+  abstract loadRange(keyPrefix: StorageKey): Promise<Chunk[]>
+
+  /** Remove all values with keys that start with `keyPrefix` */
   abstract removeRange(keyPrefix: StorageKey): Promise<void>
 }
-
-/** The type of keys for a {@link StorageAdapter}
- *
- * @remarks
- * Storage keys are arrays because they are hierarchical and the storage
- * subsystem will need to be able to do range queries for all keys that
- * have a particular prefix. For example, incremental changes for a given
- * document might be stored under `[<documentId>, "incremental", <hash>]`.
- * `StorageAdapter` implementations should not assume any particular structure
- * though.
- **/
-export type StorageKey = string[]

diff --git a/packages/automerge-repo/src/storage/StorageSubsystem.ts b/packages/automerge-repo/src/storage/StorageSubsystem.ts
index fdb95fd33..c3577dcde 100644
--- a/packages/automerge-repo/src/storage/StorageSubsystem.ts
+++ b/packages/automerge-repo/src/storage/StorageSubsystem.ts
@@ -1,46 +1,96 @@
 import * as A from "@automerge/automerge/next"
 import debug from "debug"
-import * as sha256 from "fast-sha256"
 import { headsAreSame } from "../helpers/headsAreSame.js"
 import { mergeArrays } from "../helpers/mergeArrays.js"
 import { type DocumentId } from "../types.js"
-import { StorageAdapter, StorageKey } from "./StorageAdapter.js"
-
-// Metadata about a chunk of data loaded from storage. This is stored on the
-// StorageSubsystem so when we are compacting we know what chunks we can safely delete
-type StorageChunkInfo = {
-  key: StorageKey
-  type: ChunkType
-  size: number
-}
-
-export type ChunkType = "snapshot" | "incremental"
+import { StorageAdapter } from "./StorageAdapter.js"
+import { ChunkInfo, StorageKey } from "./types.js"
+import { keyHash, headsHash } from "./keyHash.js"
+import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
+
+/**
+ * The storage subsystem is responsible for saving and loading Automerge documents to and from a
+ * storage adapter. It also provides a generic key/value storage interface for other uses.
+ */ +export class StorageSubsystem { + /** Record of the latest heads we've loaded or saved for each document */ + #storedHeads: Map = new Map() -function keyHash(binary: Uint8Array) { - const hash = sha256.hash(binary) - const hashArray = Array.from(new Uint8Array(hash)) // convert buffer to byte array - const hashHex = hashArray.map(b => ("00" + b.toString(16)).slice(-2)).join("") // convert bytes to hex string - return hashHex -} + /** Metadata on the chunks we've already loaded for each document */ + #chunkInfos: Map = new Map() -function headsHash(heads: A.Heads): string { - const encoder = new TextEncoder() - const headsbinary = mergeArrays(heads.map((h: string) => encoder.encode(h))) - return keyHash(headsbinary) -} + /** Flag to avoid compacting when a compaction is already underway */ + #compacting = false -export class StorageSubsystem { - #storageAdapter: StorageAdapter - #chunkInfos: Map = new Map() - #storedHeads: Map = new Map() #log = debug(`automerge-repo:storage-subsystem`) - #snapshotting = false + constructor(private storageAdapter: StorageAdapter) {} + + /** + * Loads the Automerge document with the given ID from storage. + */ + async loadDoc(documentId: DocumentId): Promise | null> { + // Load all the chunks for this document + const chunks = await this.storageAdapter.loadRange([documentId]) + const binaries = [] + const chunkInfos: ChunkInfo[] = [] + + for (const chunk of chunks) { + const chunkType = chunkTypeFromKey(chunk.key) + if (chunkType == null) continue + chunkInfos.push({ + key: chunk.key, + type: chunkType, + size: chunk.data.length, + }) + binaries.push(chunk.data) + } + this.#chunkInfos.set(documentId, chunkInfos) + + // Merge the chunks into a single binary + const binary = mergeArrays(binaries) + if (binary.length === 0) return null + + // Load into an Automerge document + const newDoc = A.loadIncremental(A.init(), binary) as A.Doc + + // Record the latest heads for the document + this.#storedHeads.set(documentId, A.getHeads(newDoc)) + + return newDoc + } + + /** + * Saves the provided Automerge document to storage. + * + * @remarks + * Under the hood this makes incremental saves until the incremental size is greater than the + * snapshot size, at which point the document is compacted into a single snapshot. + */ + async saveDoc(documentId: DocumentId, doc: A.Doc): Promise { + // Don't bother saving if the document hasn't changed + if (!this.#shouldSave(documentId, doc)) return + + const sourceChunks = this.#chunkInfos.get(documentId) ?? [] + if (this.#shouldCompact(sourceChunks)) { + void this.#saveTotal(documentId, doc, sourceChunks) + } else { + void this.#saveIncremental(documentId, doc) + } + this.#storedHeads.set(documentId, A.getHeads(doc)) + } - constructor(storageAdapter: StorageAdapter) { - this.#storageAdapter = storageAdapter + /** + * Removes the Automerge document with the given ID from storage + */ + async remove(documentId: DocumentId) { + void this.storageAdapter.removeRange([documentId, "snapshot"]) + void this.storageAdapter.removeRange([documentId, "incremental"]) } + /** + * Saves just the incremental changes since the last save. 
+ */ async #saveIncremental( documentId: DocumentId, doc: A.Doc @@ -49,7 +99,7 @@ export class StorageSubsystem { if (binary && binary.length > 0) { const key = [documentId, "incremental", keyHash(binary)] this.#log(`Saving incremental ${key} for document ${documentId}`) - await this.#storageAdapter.save(key, binary) + await this.storageAdapter.save(key, binary) if (!this.#chunkInfos.has(documentId)) { this.#chunkInfos.set(documentId, []) } @@ -64,12 +114,16 @@ export class StorageSubsystem { } } + /** + * Compacts the document storage into a single shapshot. + */ async #saveTotal( documentId: DocumentId, doc: A.Doc, - sourceChunks: StorageChunkInfo[] + sourceChunks: ChunkInfo[] ): Promise { - this.#snapshotting = true + this.#compacting = true + const binary = A.save(doc) const snapshotHash = headsHash(A.getHeads(doc)) const key = [documentId, "snapshot", snapshotHash] @@ -80,81 +134,46 @@ export class StorageSubsystem { this.#log(`Saving snapshot ${key} for document ${documentId}`) this.#log(`deleting old chunks ${Array.from(oldKeys)}`) - await this.#storageAdapter.save(key, binary) + await this.storageAdapter.save(key, binary) for (const key of oldKeys) { - await this.#storageAdapter.remove(key) + await this.storageAdapter.remove(key) } + const newChunkInfos = this.#chunkInfos.get(documentId)?.filter(c => !oldKeys.has(c.key)) ?? [] newChunkInfos.push({ key, type: "snapshot", size: binary.length }) - this.#chunkInfos.set(documentId, newChunkInfos) - this.#snapshotting = false - } - - async loadDoc(documentId: DocumentId): Promise | null> { - const loaded = await this.#storageAdapter.loadRange([documentId]) - const binaries = [] - const chunkInfos: StorageChunkInfo[] = [] - for (const chunk of loaded) { - const chunkType = chunkTypeFromKey(chunk.key) - if (chunkType == null) { - continue - } - chunkInfos.push({ - key: chunk.key, - type: chunkType, - size: chunk.data.length, - }) - binaries.push(chunk.data) - } - this.#chunkInfos.set(documentId, chunkInfos) - const binary = mergeArrays(binaries) - if (binary.length === 0) { - return null - } - const newDoc = A.loadIncremental(A.init(), binary) - this.#storedHeads.set(documentId, A.getHeads(newDoc)) - return newDoc - } - async saveDoc(documentId: DocumentId, doc: A.Doc): Promise { - if (!this.#shouldSave(documentId, doc)) { - return - } - const sourceChunks = this.#chunkInfos.get(documentId) ?? [] - if (this.#shouldCompact(sourceChunks)) { - void this.#saveTotal(documentId, doc, sourceChunks) - } else { - void this.#saveIncremental(documentId, doc) - } - this.#storedHeads.set(documentId, A.getHeads(doc)) - } + this.#chunkInfos.set(documentId, newChunkInfos) - async remove(documentId: DocumentId) { - void this.#storageAdapter.removeRange([documentId, "snapshot"]) - void this.#storageAdapter.removeRange([documentId, "incremental"]) + this.#compacting = false } + /** + * Returns true if the document has changed since the last time it was saved. 
+ */ #shouldSave(documentId: DocumentId, doc: A.Doc): boolean { const oldHeads = this.#storedHeads.get(documentId) if (!oldHeads) { + // we haven't saved this document before return true } const newHeads = A.getHeads(doc) if (headsAreSame(newHeads, oldHeads)) { + // the document hasn't changed return false } - return true + return true // the document has changed } - #shouldCompact(sourceChunks: StorageChunkInfo[]) { - if (this.#snapshotting) { - return false - } - // compact if the incremental size is greater than the snapshot size + /** + * We only compact if the incremental size is greater than the snapshot size. + */ + #shouldCompact(sourceChunks: ChunkInfo[]) { + if (this.#compacting) return false + let snapshotSize = 0 let incrementalSize = 0 for (const chunk of sourceChunks) { @@ -167,16 +186,3 @@ export class StorageSubsystem { return incrementalSize >= snapshotSize } } - -function chunkTypeFromKey(key: StorageKey): ChunkType | null { - if (key.length < 2) { - return null - } - const chunkTypeStr = key[key.length - 2] - if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") { - const chunkType: ChunkType = chunkTypeStr - return chunkType - } else { - return null - } -} diff --git a/packages/automerge-repo/src/storage/chunkTypeFromKey.ts b/packages/automerge-repo/src/storage/chunkTypeFromKey.ts new file mode 100644 index 000000000..5214443fe --- /dev/null +++ b/packages/automerge-repo/src/storage/chunkTypeFromKey.ts @@ -0,0 +1,22 @@ +import { StorageKey } from "./types.js" +import { ChunkType } from "./types.js" + +/** + * Keys for storing Automerge documents are of the form: + * ```ts + * [documentId, "snapshot", hash] // OR + * [documentId, "incremental", hash] + * ``` + * This function returns the chunk type ("snapshot" or "incremental") if the key is in one of these + * forms. + */ +export function chunkTypeFromKey(key: StorageKey): ChunkType | null { + if (key.length < 2) return null + + const chunkTypeStr = key[key.length - 2] // next-to-last element in key + if (chunkTypeStr === "snapshot" || chunkTypeStr === "incremental") { + return chunkTypeStr as ChunkType + } + + return null +} diff --git a/packages/automerge-repo/src/storage/keyHash.ts b/packages/automerge-repo/src/storage/keyHash.ts new file mode 100644 index 000000000..b35ec5ac1 --- /dev/null +++ b/packages/automerge-repo/src/storage/keyHash.ts @@ -0,0 +1,17 @@ +import * as A from "@automerge/automerge/next" +import * as sha256 from "fast-sha256" +import { mergeArrays } from "../helpers/mergeArrays.js" + +export function keyHash(binary: Uint8Array) { + // calculate hash + const hash = sha256.hash(binary) + return bufferToHexString(hash) +} +export function headsHash(heads: A.Heads): string { + const encoder = new TextEncoder() + const headsbinary = mergeArrays(heads.map((h: string) => encoder.encode(h))) + return keyHash(headsbinary) +} +function bufferToHexString(data: Uint8Array) { + return Array.from(data, byte => byte.toString(16).padStart(2, "0")).join("") +} diff --git a/packages/automerge-repo/src/storage/types.ts b/packages/automerge-repo/src/storage/types.ts new file mode 100644 index 000000000..a2c0f6628 --- /dev/null +++ b/packages/automerge-repo/src/storage/types.ts @@ -0,0 +1,39 @@ +/** + * A chunk is a snapshot or incremental change that is stored in a {@link StorageAdapter}. + */ +export type Chunk = { + key: StorageKey + data: Uint8Array +} + +/** + * Metadata about a chunk of data loaded from storage. 
+ * This is stored on the StorageSubsystem so when we are compacting we know what chunks we can
+ * safely delete.
+ */
+export type ChunkInfo = {
+  key: StorageKey
+  type: ChunkType
+  size: number
+}
+
+export type ChunkType = "snapshot" | "incremental"
+
+/**
+ * A storage key is an array of strings that represents a path to a value in a
+ * {@link StorageAdapter}.
+ *
+ * @remarks
+ * Storage keys are arrays because they are hierarchical and they allow the storage subsystem to do
+ * range queries for all keys that have a particular prefix. For example, incremental changes for a
+ * given document might be stored under `[<documentId>, "incremental", <hash>]`.
+ *
+ * automerge-repo mostly uses keys in the following form:
+ * ```ts
+ * [documentId, "snapshot", hash] // OR
+ * [documentId, "incremental", hash]
+ * ```
+ *
+ * However, the storage adapter implementation should be agnostic to the meaning of the key and
+ * should not assume any particular structure.
+ **/
+export type StorageKey = string[]

From e907ecf4ad6f5dabdd6014d75847314ddde0e18b Mon Sep 17 00:00:00 2001
From: HerbCaudill
Date: Mon, 30 Oct 2023 19:22:52 +0100
Subject: [PATCH 2/6] StorageSubsystem: rename `remove` to `removeDoc` for consistency

---
 packages/automerge-repo/src/Repo.ts                     | 2 +-
 packages/automerge-repo/src/storage/StorageSubsystem.ts | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/automerge-repo/src/Repo.ts b/packages/automerge-repo/src/Repo.ts
index 623bc84d3..403d5ae5f 100644
--- a/packages/automerge-repo/src/Repo.ts
+++ b/packages/automerge-repo/src/Repo.ts
@@ -106,7 +106,7 @@ export class Repo extends EventEmitter<RepoEvents> {
       // synchronizer.removeDocument(documentId)

       if (storageSubsystem) {
-        storageSubsystem.remove(documentId).catch(err => {
+        storageSubsystem.removeDoc(documentId).catch(err => {
           this.#log("error deleting document", { documentId, err })
         })
       }
diff --git a/packages/automerge-repo/src/storage/StorageSubsystem.ts b/packages/automerge-repo/src/storage/StorageSubsystem.ts
index c3577dcde..2f1e158f1 100644
--- a/packages/automerge-repo/src/storage/StorageSubsystem.ts
+++ b/packages/automerge-repo/src/storage/StorageSubsystem.ts
@@ -83,7 +83,7 @@ export class StorageSubsystem {
   /**
    * Removes the Automerge document with the given ID from storage
    */
-  async remove(documentId: DocumentId) {
+  async removeDoc(documentId: DocumentId) {
     void this.storageAdapter.removeRange([documentId, "snapshot"])
     void this.storageAdapter.removeRange([documentId, "incremental"])
   }

From 4e693be01e6670621cfe657450ec3965a9378be8 Mon Sep 17 00:00:00 2001
From: HerbCaudill
Date: Tue, 31 Oct 2023 16:14:12 +0100
Subject: [PATCH 3/6] StorageSubsystem: add arbitrary key/value storage

---
 .../src/storage/StorageSubsystem.ts | 51 +++++++++++++++++++
 1 file changed, 51 insertions(+)

diff --git a/packages/automerge-repo/src/storage/StorageSubsystem.ts b/packages/automerge-repo/src/storage/StorageSubsystem.ts
index 2f1e158f1..a5501f5a8 100644
--- a/packages/automerge-repo/src/storage/StorageSubsystem.ts
+++ b/packages/automerge-repo/src/storage/StorageSubsystem.ts
@@ -26,6 +26,57 @@ export class StorageSubsystem {

   constructor(private storageAdapter: StorageAdapter) {}

+  // ARBITRARY KEY/VALUE STORAGE
+
+  // The `load`, `save`, and `remove` methods are for generic key/value storage, as opposed to
+  // Automerge documents. For example, they're used by the LocalFirstAuthProvider to persist the
+  // encrypted team graph that encodes group membership and permissions.
+ // + // The namespace parameter is to prevent collisions with other users of the storage subsystem. + // Typically this will be the name of the plug-in, adapter, or other system that is using it. For + // example, the LocalFirstAuthProvider uses the namespace `LocalFirstAuthProvider`. + + /** Loads a value from storage. */ + async load( + /** Namespace to prevent collisions with other users of the storage subsystem. */ + namespace: string, + + /** Key to load. Typically a UUID or other unique identifier, but could be any string. */ + key: string + ): Promise { + const storageKey = [namespace, key] as StorageKey + return await this.storageAdapter.load(storageKey) + } + + /** Saves a value in storage. */ + async save( + /** Namespace to prevent collisions with other users of the storage subsystem. */ + namespace: string, + + /** Key to load. Typically a UUID or other unique identifier, but could be any string. */ + key: string, + + /** Data to save, as a binary blob. */ + data: Uint8Array + ): Promise { + const storageKey = [namespace, key] as StorageKey + await this.storageAdapter.save(storageKey, data) + } + + /** Removes a value from storage. */ + async remove( + /** Namespace to prevent collisions with other users of the storage subsystem. */ + namespace: string, + + /** Key to remove. Typically a UUID or other unique identifier, but could be any string. */ + key: string + ): Promise { + const storageKey = [namespace, key] as StorageKey + await this.storageAdapter.remove(storageKey) + } + + // AUTOMERGE DOCUMENT STORAGE + /** * Loads the Automerge document with the given ID from storage. */ From 37d9fa414d51c67a88eb19af5c3aa59ab83f6a02 Mon Sep 17 00:00:00 2001 From: HerbCaudill Date: Mon, 30 Oct 2023 15:52:13 +0100 Subject: [PATCH 4/6] StorageSubsystem.test: apply both tests to both adapters --- .../test/StorageSubsystem.test.ts | 46 +++++++++---------- 1 file changed, 21 insertions(+), 25 deletions(-) diff --git a/packages/automerge-repo/test/StorageSubsystem.test.ts b/packages/automerge-repo/test/StorageSubsystem.test.ts index 2ea86952c..b719fc9c2 100644 --- a/packages/automerge-repo/test/StorageSubsystem.test.ts +++ b/packages/automerge-repo/test/StorageSubsystem.test.ts @@ -36,38 +36,34 @@ describe("StorageSubsystem", () => { // check that it's the same doc assert.deepStrictEqual(reloadedDoc, doc) }) - }) - }) - it("correctly stores incremental changes following a load", async () => { - const adapter = new DummyStorageAdapter() - const storage = new StorageSubsystem(adapter) - - const doc = A.change(A.init(), "test", d => { - d.foo = "bar" - }) + it("correctly stores incremental changes following a load", async () => { + const storage = new StorageSubsystem(adapter) - // save it to storage - const key = parseAutomergeUrl(generateAutomergeUrl()).documentId - storage.saveDoc(key, doc) + const doc = A.change(A.init(), "test", d => { + d.foo = "bar" + }) - // create new storage subsystem to simulate a new process - const storage2 = new StorageSubsystem(adapter) + // save it to storage + const key = parseAutomergeUrl(generateAutomergeUrl()).documentId + storage.saveDoc(key, doc) - // reload it from storage - const reloadedDoc = await storage2.loadDoc(key) + // create new storage subsystem to simulate a new process + const storage2 = new StorageSubsystem(adapter) - assert(reloadedDoc, "doc should be loaded") + // reload it from storage + const reloadedDoc = await storage2.loadDoc(key) - // make a change - const changedDoc = A.change(reloadedDoc, "test 2", d => { - d.foo = "baz" - }) + 
      assert(reloadedDoc, "doc should be loaded")

      // make a change
      const changedDoc = A.change(reloadedDoc, "test 2", d => {
        d.foo = "baz"
      })

      // save it to storage
      storage2.saveDoc(key, changedDoc)

-  // check that the storage adapter contains the correct keys
-  assert(adapter.keys().some(k => k.startsWith(`${key}.incremental.`)))
+    })
   })
 })

From 371ff592865a37f0b8ea4c99e70b588a2eba96cd Mon Sep 17 00:00:00 2001
From: HerbCaudill
Date: Tue, 31 Oct 2023 16:15:28 +0100
Subject: [PATCH 5/6] StorageSubsystem.test: add tests for key/value storage

---
 .../test/StorageSubsystem.test.ts | 90 ++++++++++++++++++-
 1 file changed, 89 insertions(+), 1 deletion(-)

diff --git a/packages/automerge-repo/test/StorageSubsystem.test.ts b/packages/automerge-repo/test/StorageSubsystem.test.ts
index b719fc9c2..4d811cbf0 100644
--- a/packages/automerge-repo/test/StorageSubsystem.test.ts
+++ b/packages/automerge-repo/test/StorageSubsystem.test.ts
@@ -8,6 +8,8 @@ import { describe, it } from "vitest"
 import { generateAutomergeUrl, parseAutomergeUrl } from "../src/AutomergeUrl.js"
 import { StorageSubsystem } from "../src/storage/StorageSubsystem.js"
 import { DummyStorageAdapter } from "./helpers/DummyStorageAdapter.js"
+import { cbor } from "../src/index.js"
+import { pause } from "../src/helpers/pause.js"

 const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "automerge-repo-tests"))

@@ -19,7 +21,8 @@ describe("StorageSubsystem", () => {
   Object.entries(adaptersToTest).forEach(([adapterName, adapter]) => {
     describe(adapterName, () => {
-      it("can store and retrieve an Automerge document", async () => {
+      describe("Automerge document storage", () => {
+        it("stores and retrieves an Automerge document", async () => {
           const storage = new StorageSubsystem(adapter)

           const doc = A.change(A.init(), "test", d => {

          // save it to storage
          storage2.saveDoc(key, changedDoc)
        })
+
+        it("removes an Automerge document", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const doc = A.change(A.init(), "test", d => {
+            d.foo = "bar"
+          })
+
+          // save it to storage
+          const key = parseAutomergeUrl(generateAutomergeUrl()).documentId
+          await storage.saveDoc(key, doc)
+
+          // reload it from storage
+          const reloadedDoc = await storage.loadDoc(key)
+
+          // check that it's the same doc
+          assert.deepStrictEqual(reloadedDoc, doc)
+
+          // remove it
+          await storage.removeDoc(key)
+
+          // reload it from storage
+          const reloadedDoc2 = await storage.loadDoc(key)
+
+          // check that it's undefined
+          assert.equal(reloadedDoc2, undefined)
+        })
+      })
+
+      describe("Arbitrary key/value storage", () => {
+        it("stores and retrieves a blob", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const value = cbor.encode({ foo: "bar" })
+
+          const namespace = "MyCoolAdapter"
+          const key = "ABC123"
+          await storage.save(namespace, key, value)
+
+          const reloadedValue = await storage.load(namespace, key)
+          assert.notEqual(reloadedValue, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue)["foo"], "bar")
+        })
+
+        it("keeps namespaces separate", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const key = "ABC123"
+
+          const namespace1 = "MyCoolAdapter"
+          const value1 = cbor.encode({ foo: "bar" })
+          await storage.save(namespace1, key, value1)
+
+          const namespace2 = "SomeDumbAdapter"
+          const value2 = cbor.encode({ baz: "pizza" })
+          await storage.save(namespace2, key, value2)
+
+          const reloadedValue1 = await storage.load(namespace1, key)
+          assert.notEqual(reloadedValue1, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue1)["foo"], "bar")
+
+          const reloadedValue2 = await storage.load(namespace2, key)
+          assert.notEqual(reloadedValue2, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue2)["baz"], "pizza")
+        })
+
+        it("removes a blob", async () => {
+          const storage = new StorageSubsystem(adapter)
+
+          const value = cbor.encode({ foo: "bar" })
+
+          const namespace = "MyCoolAdapter"
+          const key = "ABC123"
+          await storage.save(namespace, key, value)
+
+          const reloadedValue = await storage.load(namespace, key)
+          assert.notEqual(reloadedValue, undefined)
+          assert.deepEqual(cbor.decode(reloadedValue)["foo"], "bar")
+
+          await storage.remove(namespace, key)
+
+          const reloadedValue2 = await storage.load(namespace, key)
+          assert.equal(reloadedValue2, undefined)
+        })
+      })
     })
   })
 })

From 2cb5779e960aa48922297661ba7d125d40ab6e6b Mon Sep 17 00:00:00 2001
From: HerbCaudill
Date: Wed, 1 Nov 2023 12:27:17 +0100
Subject: [PATCH 6/6] StorageSubsystem: #storageAdapter

---
 .../src/storage/StorageSubsystem.ts | 25 +++++++++++--------
 1 file changed, 15 insertions(+), 10 deletions(-)

diff --git a/packages/automerge-repo/src/storage/StorageSubsystem.ts b/packages/automerge-repo/src/storage/StorageSubsystem.ts
index a5501f5a8..1bd25a914 100644
--- a/packages/automerge-repo/src/storage/StorageSubsystem.ts
+++ b/packages/automerge-repo/src/storage/StorageSubsystem.ts
@@ -13,6 +13,9 @@ import { chunkTypeFromKey } from "./chunkTypeFromKey.js"
  * storage adapter. It also provides a generic key/value storage interface for other uses.
  */
 export class StorageSubsystem {
+  /** The storage adapter to use for saving and loading documents */
+  #storageAdapter: StorageAdapter
+
   /** Record of the latest heads we've loaded or saved for each document */
   #storedHeads: Map<DocumentId, A.Heads> = new Map()

@@ -24,7 +27,9 @@ export class StorageSubsystem {

   #log = debug(`automerge-repo:storage-subsystem`)

-  constructor(private storageAdapter: StorageAdapter) {}
+  constructor(storageAdapter: StorageAdapter) {
+    this.#storageAdapter = storageAdapter
+  }

   // ARBITRARY KEY/VALUE STORAGE

@@ -45,7 +50,7 @@ export class StorageSubsystem {
     key: string
   ): Promise<Uint8Array | undefined> {
     const storageKey = [namespace, key] as StorageKey
-    return await this.storageAdapter.load(storageKey)
+    return await this.#storageAdapter.load(storageKey)
   }

   /** Saves a value in storage. */
@@ -60,7 +65,7 @@ export class StorageSubsystem {
     data: Uint8Array
   ): Promise<void> {
     const storageKey = [namespace, key] as StorageKey
-    await this.storageAdapter.save(storageKey, data)
+    await this.#storageAdapter.save(storageKey, data)
   }

   /** Removes a value from storage. */
   async remove(
     /** Namespace to prevent collisions with other users of the storage subsystem. */
     namespace: string,

     /** Key to remove. Typically a UUID or other unique identifier, but could be any string. */
     key: string
   ): Promise<void> {
     const storageKey = [namespace, key] as StorageKey
-    await this.storageAdapter.remove(storageKey)
+    await this.#storageAdapter.remove(storageKey)
   }
@@ -82,7 +87,7 @@ export class StorageSubsystem {
    */
   async loadDoc(documentId: DocumentId): Promise<A.Doc<unknown> | null> {
     // Load all the chunks for this document
-    const chunks = await this.storageAdapter.loadRange([documentId])
+    const chunks = await this.#storageAdapter.loadRange([documentId])
     const binaries = []
     const chunkInfos: ChunkInfo[] = []
@@ -135,8 +140,8 @@ export class StorageSubsystem {
    * Removes the Automerge document with the given ID from storage
    */
   async removeDoc(documentId: DocumentId) {
-    void this.storageAdapter.removeRange([documentId, "snapshot"])
-    void this.storageAdapter.removeRange([documentId, "incremental"])
+    void this.#storageAdapter.removeRange([documentId, "snapshot"])
+    void this.#storageAdapter.removeRange([documentId, "incremental"])
   }

   /**
@@ -150,7 +155,7 @@ export class StorageSubsystem {
     if (binary && binary.length > 0) {
       const key = [documentId, "incremental", keyHash(binary)]
       this.#log(`Saving incremental ${key} for document ${documentId}`)
-      await this.storageAdapter.save(key, binary)
+      await this.#storageAdapter.save(key, binary)
       if (!this.#chunkInfos.has(documentId)) {
         this.#chunkInfos.set(documentId, [])
       }
@@ -185,10 +190,10 @@ export class StorageSubsystem {
     this.#log(`Saving snapshot ${key} for document ${documentId}`)
     this.#log(`deleting old chunks ${Array.from(oldKeys)}`)

-    await this.storageAdapter.save(key, binary)
+    await this.#storageAdapter.save(key, binary)

     for (const key of oldKeys) {
-      await this.storageAdapter.remove(key)
+      await this.#storageAdapter.remove(key)
     }

     const newChunkInfos =
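
---

For illustration only — this is not part of the patch series above — the `StorageAdapter` contract documented in patch 1 is small enough that a toy implementation fits in a few dozen lines. The sketch below is a minimal in-memory adapter written against the signatures shown in that patch. The `Map`-backed store and the dot-joined internal key encoding are assumptions of this sketch (the dot encoding mirrors the flat `${key}.incremental.` keys the tests inspect); a real adapter can persist keys however it likes.

```ts
// A minimal in-memory StorageAdapter, for illustration only. The Map-backed
// store and the dot-joined key encoding are assumptions of this sketch, not
// part of the automerge-repo package.
import { StorageAdapter } from "./StorageAdapter.js"
import { Chunk, StorageKey } from "./types.js"

export class InMemoryStorageAdapter extends StorageAdapter {
  #data = new Map<string, Uint8Array>()

  // Keys are arrays of strings; we join them with "." to use them as Map keys.
  // (Caveat: this encoding breaks down if a key segment itself contains a ".")
  #encode(key: StorageKey): string {
    return key.join(".")
  }

  async load(key: StorageKey): Promise<Uint8Array | undefined> {
    return this.#data.get(this.#encode(key))
  }

  async save(key: StorageKey, data: Uint8Array): Promise<void> {
    this.#data.set(this.#encode(key), data)
  }

  async remove(key: StorageKey): Promise<void> {
    this.#data.delete(this.#encode(key))
  }

  // Range operations match whole leading segments of the key, so [documentId]
  // matches [documentId, "snapshot", hash] and [documentId, "incremental", hash]
  // alike.
  async loadRange(keyPrefix: StorageKey): Promise<Chunk[]> {
    const prefix = this.#encode(keyPrefix)
    const chunks: Chunk[] = []
    for (const [k, data] of this.#data) {
      if (k === prefix || k.startsWith(prefix + "."))
        chunks.push({ key: k.split("."), data })
    }
    return chunks
  }

  async removeRange(keyPrefix: StorageKey): Promise<void> {
    const prefix = this.#encode(keyPrefix)
    for (const k of [...this.#data.keys()]) {
      if (k === prefix || k.startsWith(prefix + ".")) this.#data.delete(k)
    }
  }
}
```

Note that because `loadRange` matches whole leading segments, `[documentId]` also matches something like `[documentId, "syncState"]` — exactly the caveat the `loadRange` docs in patch 1 warn about.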