Skip to content

Commit

Permalink
NodeFSStorageAdapter: refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
HerbCaudill committed Oct 31, 2023
1 parent db156e8 commit e6a622e
Showing 1 changed file with 48 additions and 65 deletions.
113 changes: 48 additions & 65 deletions packages/automerge-repo-storage-nodefs/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,28 +1,16 @@
/**
 * A `StorageAdapter` which stores data in the local filesystem
 *
 * @packageDocumentation
 */

import { StorageAdapter, type StorageKey } from "@automerge/automerge-repo"
import fs from "fs"
import path from "path"
import { rimraf } from "rimraf"

const walkdir = async dirPath =>
Promise.all(
await fs.promises.readdir(dirPath, { withFileTypes: true }).then(entries =>
entries.map(entry => {
const childPath = path.join(dirPath, entry.name)
return entry.isDirectory() ? walkdir(childPath) : childPath
})
)
)

export class NodeFSStorageAdapter extends StorageAdapter {
private baseDirectory: string
private cache: {
[key: string]: { storageKey: StorageKey; data: Uint8Array }
} = {}
private cache: { [key: string]: Uint8Array } = {}

/**
* @param baseDirectory - The path to the directory to store data in. Defaults to "./automerge-repo-data".
Expand All @@ -33,31 +21,27 @@ export class NodeFSStorageAdapter extends StorageAdapter {
}

async load(keyArray: StorageKey): Promise<Uint8Array | undefined> {
const key = cacheKey(keyArray)
if (this.cache[key]) {
return this.cache[key].data
}
const key = getKey(keyArray)
if (this.cache[key]) return this.cache[key]

const filePath = this.getFilePath(keyArray)

try {
const fileContent = await fs.promises.readFile(filePath)
return new Uint8Array(fileContent)
} catch (error) {
if (error.code === "ENOENT") {
// file not found
return undefined
} else {
throw error
}
// don't throw if file not found
if (error.code === "ENOENT") return undefined
throw error
}
}

async save(keyArray: StorageKey, binary: Uint8Array): Promise<void> {
const key = cacheKey(keyArray)
this.cache[key] = { data: binary, storageKey: keyArray }
const key = getKey(keyArray)
this.cache[key] = binary

const filePath = this.getFilePath(keyArray)

await fs.promises.mkdir(path.dirname(filePath), { recursive: true })
await fs.promises.writeFile(filePath, binary)
}
Expand All @@ -68,10 +52,8 @@ export class NodeFSStorageAdapter extends StorageAdapter {
try {
await fs.promises.unlink(filePath)
} catch (error) {
if (error.code !== "ENOENT") {
// only throw if error is not file not found
throw error
}
// don't throw if file not found
if (error.code !== "ENOENT") throw error
}
}

Expand All @@ -82,59 +64,42 @@ export class NodeFSStorageAdapter extends StorageAdapter {
and could probably be simplified. */

const dirPath = this.getFilePath(keyPrefix)
const cacheKeyPrefixString = cacheKey(keyPrefix)

// Get the list of all cached keys that match the prefix
const cachedKeys: string[] = Object.keys(this.cache).filter(key =>
key.startsWith(cacheKeyPrefixString)
)
const cachedKeys = this.cachedKeys(keyPrefix)

// Read filenames from disk
let diskFiles
try {
diskFiles = await walkdir(dirPath)
} catch (error) {
if (error.code === "ENOENT") {
// Directory not found, initialize as empty
diskFiles = []
} else {
throw error
}
}
const diskFiles = await walkdir(dirPath)

// The "keys" in the cache don't include the baseDirectory.
// We want to de-dupe with the cached keys so we'll use getKey to normalize them.
const diskKeys: string[] = diskFiles
.flat(Infinity) // the walk function returns a nested array
.map(fileName =>
this.getKey([path.relative(this.baseDirectory, fileName)])
)
const diskKeys: string[] = diskFiles.map((fileName: string) =>
getKey([path.relative(this.baseDirectory, fileName)])
)

// Combine and deduplicate the lists of keys
const allKeys = [...new Set([...cachedKeys, ...diskKeys])]

// Load all files
return Promise.all(
const result = await Promise.all(
allKeys.map(async keyString => {
const key: StorageKey = keyString.split(path.sep)
return {
data: await this.load(key),
key,
}
const data = await this.load(key)
return { data, key }
})
)

return result
}

async removeRange(keyPrefix: string[]): Promise<void> {
  // Warning: this recursively deletes the directory for the given prefix
  // and all of its contents! Be absolutely sure this is what you want.
  await rimraf(this.getFilePath(keyPrefix))
}

private getKey(key: StorageKey): string {
return path.join(...key)
private cachedKeys(keyPrefix: string[]): string[] {
const cacheKeyPrefixString = getKey(keyPrefix)
return Object.keys(this.cache).filter(key =>
key.startsWith(cacheKeyPrefixString)
)
}

private getFilePath(keyArray: string[]): string {
Expand All @@ -149,6 +114,24 @@ export class NodeFSStorageAdapter extends StorageAdapter {
}
}

function cacheKey(key: StorageKey): string {
return path.join(...key)
// HELPERS

const getKey = (key: StorageKey): string => path.join(...key)

/**
 * Returns the full paths of all files in a directory, recursively.
 *
 * @param dirPath - The directory to walk.
 * @returns The paths of all files found (resolved against `dirPath`);
 *   an empty array if the directory does not exist.
 */
const walkdir = async (dirPath: string): Promise<string[]> => {
  try {
    const entries = await fs.promises.readdir(dirPath, { withFileTypes: true })
    const files = await Promise.all(
      entries.map(entry => {
        const subpath = path.resolve(dirPath, entry.name)
        // recurse into subdirectories; plain files are returned as-is
        return entry.isDirectory() ? walkdir(subpath) : subpath
      })
    )
    // each recursive call yields an array, so flatten one level
    return files.flat()
  } catch (error) {
    // don't throw if directory not found — treat as empty.
    // `catch` variables are `unknown` under strict TS, so narrow before
    // reading `.code`.
    if ((error as NodeJS.ErrnoException).code === "ENOENT") return []
    throw error
  }
}

0 comments on commit e6a622e

Please sign in to comment.