From e6a622edebc1daa2ff4b4f3e2c47e6bb29a9cc48 Mon Sep 17 00:00:00 2001
From: HerbCaudill
Date: Tue, 31 Oct 2023 17:32:08 +0100
Subject: [PATCH] NodeFSStorageAdapter: refactor

---
 .../src/index.ts | 113 ++++++++----
 1 file changed, 48 insertions(+), 65 deletions(-)

diff --git a/packages/automerge-repo-storage-nodefs/src/index.ts b/packages/automerge-repo-storage-nodefs/src/index.ts
index f8299cacd..4cc1f44b1 100644
--- a/packages/automerge-repo-storage-nodefs/src/index.ts
+++ b/packages/automerge-repo-storage-nodefs/src/index.ts
@@ -1,28 +1,16 @@
 /**
- * A `StorageAdapter` which stores data in the local filesystem
- *
  * @packageDocumentation
+ * A `StorageAdapter` which stores data in the local filesystem
 */
+
 import { StorageAdapter, type StorageKey } from "@automerge/automerge-repo"
 import fs from "fs"
 import path from "path"
 import { rimraf } from "rimraf"
 
-const walkdir = async dirPath =>
-  Promise.all(
-    await fs.promises.readdir(dirPath, { withFileTypes: true }).then(entries =>
-      entries.map(entry => {
-        const childPath = path.join(dirPath, entry.name)
-        return entry.isDirectory() ? walkdir(childPath) : childPath
-      })
-    )
-  )
-
 export class NodeFSStorageAdapter extends StorageAdapter {
   private baseDirectory: string
-  private cache: {
-    [key: string]: { storageKey: StorageKey; data: Uint8Array }
-  } = {}
+  private cache: { [key: string]: Uint8Array } = {}
 
   /**
    * @param baseDirectory - The path to the directory to store data in. Defaults to "./automerge-repo-data".
@@ -33,10 +21,8 @@ export class NodeFSStorageAdapter extends StorageAdapter {
   }
 
   async load(keyArray: StorageKey): Promise<Uint8Array | undefined> {
-    const key = cacheKey(keyArray)
-    if (this.cache[key]) {
-      return this.cache[key].data
-    }
+    const key = getKey(keyArray)
+    if (this.cache[key]) return this.cache[key]
 
     const filePath = this.getFilePath(keyArray)
 
@@ -44,20 +30,18 @@ export class NodeFSStorageAdapter extends StorageAdapter {
     try {
       const fileContent = await fs.promises.readFile(filePath)
      return new Uint8Array(fileContent)
     } catch (error) {
-      if (error.code === "ENOENT") {
-        // file not found
-        return undefined
-      } else {
-        throw error
-      }
+      // don't throw if file not found
+      if (error.code === "ENOENT") return undefined
+      throw error
     }
   }
 
   async save(keyArray: StorageKey, binary: Uint8Array): Promise<void> {
-    const key = cacheKey(keyArray)
-    this.cache[key] = { data: binary, storageKey: keyArray }
+    const key = getKey(keyArray)
+    this.cache[key] = binary
 
     const filePath = this.getFilePath(keyArray)
+    await fs.promises.mkdir(path.dirname(filePath), { recursive: true })
     await fs.promises.writeFile(filePath, binary)
   }
@@ -68,10 +52,8 @@ export class NodeFSStorageAdapter extends StorageAdapter {
     try {
       await fs.promises.unlink(filePath)
     } catch (error) {
-      if (error.code !== "ENOENT") {
-        // only throw if error is not file not found
-        throw error
-      }
+      // don't throw if file not found
+      if (error.code !== "ENOENT") throw error
     }
   }
 
@@ -82,59 +64,42 @@ export class NodeFSStorageAdapter extends StorageAdapter {
     /* This whole function does a bunch of gratuitous string manipulation and could probably be simplified.
     */
     const dirPath = this.getFilePath(keyPrefix)
-    const cacheKeyPrefixString = cacheKey(keyPrefix)
-
-    // Get the list of all cached keys that match the prefix
-    const cachedKeys: string[] = Object.keys(this.cache).filter(key =>
-      key.startsWith(cacheKeyPrefixString)
-    )
+    const cachedKeys = this.cachedKeys(keyPrefix)
 
     // Read filenames from disk
-    let diskFiles
-    try {
-      diskFiles = await walkdir(dirPath)
-    } catch (error) {
-      if (error.code === "ENOENT") {
-        // Directory not found, initialize as empty
-        diskFiles = []
-      } else {
-        throw error
-      }
-    }
+    const diskFiles = await walkdir(dirPath)
 
     // The "keys" in the cache don't include the baseDirectory.
     // We want to de-dupe with the cached keys so we'll use getKey to normalize them.
-    const diskKeys: string[] = diskFiles
-      .flat(Infinity) // the walk function returns a nested array
-      .map(fileName =>
-        this.getKey([path.relative(this.baseDirectory, fileName)])
-      )
+    const diskKeys: string[] = diskFiles.map((fileName: string) =>
+      getKey([path.relative(this.baseDirectory, fileName)])
+    )
 
     // Combine and deduplicate the lists of keys
     const allKeys = [...new Set([...cachedKeys, ...diskKeys])]
 
     // Load all files
-    return Promise.all(
+    const result = await Promise.all(
      allKeys.map(async keyString => {
         const key: StorageKey = keyString.split(path.sep)
-        return {
-          data: await this.load(key),
-          key,
-        }
+        const data = await this.load(key)
+        return { data, key }
       })
     )
+
+    return result
   }
 
   async removeRange(keyPrefix: string[]): Promise<void> {
     const dirPath = this.getFilePath(keyPrefix)
-
-    // Warning: This method will recursively delete the directory and all its contents!
-    // Be absolutely sure this is what you want.
     await rimraf(dirPath)
   }
 
-  private getKey(key: StorageKey): string {
-    return path.join(...key)
+  private cachedKeys(keyPrefix: string[]): string[] {
+    const cacheKeyPrefixString = getKey(keyPrefix)
+    return Object.keys(this.cache).filter(key =>
+      key.startsWith(cacheKeyPrefixString)
+    )
   }
 
   private getFilePath(keyArray: string[]): string {
@@ -149,6 +114,24 @@ export class NodeFSStorageAdapter extends StorageAdapter {
   }
 }
 
-function cacheKey(key: StorageKey): string {
-  return path.join(...key)
+// HELPERS
+
+const getKey = (key: StorageKey): string => path.join(...key)
+
+/** returns all files in a directory, recursively */
+const walkdir = async (dirPath: string): Promise<string[]> => {
+  try {
+    const entries = await fs.promises.readdir(dirPath, { withFileTypes: true })
+    const files = await Promise.all(
+      entries.map(entry => {
+        const subpath = path.resolve(dirPath, entry.name)
+        return entry.isDirectory() ? walkdir(subpath) : subpath
+      })
+    )
+    return files.flat()
+  } catch (error) {
+    // don't throw if directory not found
+    if (error.code === "ENOENT") return []
+    throw error
+  }
 }
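
Usage note, not part of the patch: a minimal sketch of wiring the adapter into a repo, assuming the `Repo` constructor from `@automerge/automerge-repo` as of this release; the directory path is illustrative.

import { Repo } from "@automerge/automerge-repo"
import { NodeFSStorageAdapter } from "@automerge/automerge-repo-storage-nodefs"

// Data is persisted under ./automerge-repo-data, with storage keys joined
// into relative file paths; after this patch, save() creates intermediate
// directories itself via mkdir(..., { recursive: true }).
const repo = new Repo({
  network: [],
  storage: new NodeFSStorageAdapter("./automerge-repo-data"),
})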