Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add metadata api #267

Closed
wants to merge 8 commits into from
Closed
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 108 additions & 10 deletions packages/automerge-repo/src/DocHandle.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ export class DocHandle<T> //
#machine: DocHandleXstateMachine<T>
#timeoutDelay: number
#remoteHeads: Record<StorageId, A.Heads> = {}
#changeMetadata: ChangeMetadataFunction

/** The URL of this document
*
Expand All @@ -54,18 +55,31 @@ export class DocHandle<T> //
/** @hidden */
constructor(
public documentId: DocumentId,
{ isNew = false, timeoutDelay = 60_000 }: DocHandleOptions = {}
{
timeoutDelay = 60_000,
changeMetadata: changeMetadataFunction = () => undefined,
init = false,
}: DocHandleOptions<T> = {}
) {
super()
this.#timeoutDelay = timeoutDelay
this.#changeMetadata = changeMetadataFunction
this.#log = debug(`automerge-repo:dochandle:${this.documentId.slice(0, 5)}`)

// initial doc
let doc = A.init<T>()

// Make an empty change so that we have something to save to disk
if (isNew) {
doc = A.emptyChange(doc, {})
if (init) {
const options = init === true ? {} : init

doc = A.emptyChange(
doc,
optionsWithGlobalMetadata(
options,
this.#changeMetadata(this.documentId) ?? {}
)
)
}

/**
Expand Down Expand Up @@ -217,7 +231,7 @@ export class DocHandle<T> //
})
.start()

this.#machine.send(isNew ? CREATE : FIND)
this.#machine.send(init ? CREATE : FIND)
}

// PRIVATE
Expand Down Expand Up @@ -340,7 +354,7 @@ export class DocHandle<T> //
}

/** `change` is called by the repo when the document is changed locally */
change(callback: A.ChangeFn<T>, options: A.ChangeOptions<T> = {}) {
change(callback: A.ChangeFn<T>, options: DocHandleChangeOptions<T> = {}) {
if (!this.isReady()) {
throw new Error(
`DocHandle#${this.documentId} is not ready. Check \`handle.isReady()\` before accessing the document.`
Expand All @@ -349,7 +363,14 @@ export class DocHandle<T> //
this.#machine.send(UPDATE, {
payload: {
callback: (doc: A.Doc<T>) => {
return A.change(doc, options, callback)
return A.change(
doc,
optionsWithGlobalMetadata(
options,
this.#changeMetadata(this.documentId) ?? {}
),
callback
)
},
},
})
Expand All @@ -362,7 +383,7 @@ export class DocHandle<T> //
changeAt(
heads: A.Heads,
callback: A.ChangeFn<T>,
options: A.ChangeOptions<T> = {}
options: DocHandleChangeOptions<T> = {}
): string[] | undefined {
if (!this.isReady()) {
throw new Error(
Expand All @@ -373,7 +394,15 @@ export class DocHandle<T> //
this.#machine.send(UPDATE, {
payload: {
callback: (doc: A.Doc<T>) => {
const result = A.changeAt(doc, heads, options, callback)
const result = A.changeAt(
doc,
heads,
optionsWithGlobalMetadata(
options,
this.#changeMetadata(this.documentId) ?? {}
),
callback
)
resultHeads = result.newHeads
return result.newDoc
},
Expand Down Expand Up @@ -448,14 +477,83 @@ export class DocHandle<T> //
}
}

function optionsWithGlobalMetadata<T>(
options: DocHandleChangeOptions<T>,
globalMetadata: ChangeMetadata
): A.ChangeOptions<T> {
const mergedMetadata: MergedMetadata = { metadata: {} }

mergeMetadata(mergedMetadata, globalMetadata)

if (options.metadata) {
mergeMetadata(mergedMetadata, options.metadata)
}

const { metadata, time } = mergedMetadata

return {
time,
message:
Object.values(metadata).length > 0 ? JSON.stringify(metadata) : undefined,
patchCallback: options.patchCallback,
}
}

function mergeMetadata(target: MergedMetadata, metadata: ChangeMetadata) {
for (const [key, value] of Object.entries(metadata)) {
const type = typeof value

// remove time from metadata, because it can be stored more effiently as a time delta
// this will be no longer necessary once we have proper metadata support
if (key === "time" && type === "number") {
target.time = value as number
continue
}

if (type !== "number" && type !== "string" && type !== "boolean") {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We can also allow Uint8Array here.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Although I guess we currently can't serialize that to JSON in a nice way so maybe we leave that for the future.

throw new Error(
`Only primive values "number", "string" and "boolean" are allowed in metadata`
)
}

target.metadata[key] = value
}
}

// Internal accumulator used by optionsWithGlobalMetadata / mergeMetadata:
// `metadata` collects the validated entries, while a numeric `time` entry is
// split out so it can be passed as the dedicated Automerge `time` option.
interface MergedMetadata {
metadata: ChangeMetadata
time?: number
}

// WRAPPER CLASS TYPES

/** @hidden */
export interface DocHandleOptions {
isNew?: boolean
/** Options for constructing a {@link DocHandle} */
export interface DocHandleOptions<T> {
// how long to wait for the document before timing out (milliseconds)
timeoutDelay?: number
// repo-level function supplying default metadata for every change on this handle
changeMetadata?: ChangeMetadataFunction
// set init to true or pass in initialization options to create a new empty document
init?: boolean | DocHandleChangeOptions<T>
}

// todo: remove this type once we have real metadata on changes in automerge
// as an interim solution we use the message attribute to store the metadata as a JSON string
// (see optionsWithGlobalMetadata, which performs the JSON encoding)
export interface DocHandleChangeOptions<T> {
// per-change metadata; merged over (and overriding) the repo-level defaults
metadata?: ChangeMetadata
// forwarded unchanged to Automerge's change options
patchCallback?: A.PatchCallback<T>
}

/** Arbitrary per-change metadata. Values are restricted to primitives so the
 * map can be serialized to JSON (see mergeMetadata). */
export type ChangeMetadata = Record<string, number | string | boolean>
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Looking ahead to how we compress this I am not sure this API will do everything we need. When compressing this metadata we don't store the names of the fields, but instead an integer column ID. This means that the application will need to provide some mapping from a column ID to the name of the field in the metadata object.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I’d suggest that for app-defined metadata columns we identify the columns by name+type, rather than by a numeric column ID. That would simplify the API and only cost a few bytes more space. The question is how the type should be identified in the API. With a non-null value we could check if the value is an integer, string, or byte array, and assign it to the appropriate typed column. With a null value we could just treat it as absent, and any metadata columns that exist because of non-null values on other changes will just be filled in with null anyway.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ah that makes sense. This would imply storing the union of every metadata key of every change in the document in a lookup table somewhere in the serialized document chunk right?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah I think so. The first time a change has a non-null value in its metadata, we create a column identified by its metadata key and the type of the value. The serialised document will have to store every metadata column that exists on any of the changes. Changes that don't mention a particular metadata column just fill it in with null, as is the behaviour for the Automerge-internal columns.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This does mean that if a user never puts anything except null as the value for a metadata key then we would have to do something like not write it to the document at all right (because we don't know what column type to write). This means it would not be possible to distinguish between a null value and a not-present value. Maybe we should say that you can't write null values to avoid ambiguity?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yup agree, let's disallow nulls.

Copy link
Contributor

@alexjg alexjg Jan 13, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've been thinking a little bit more about this. At some point we're going to want to have some kind of squash/rebase workflow I think. In such a workflow we would need to decide what to do with the metadata on each change. I think ideally we would just encode all the metadata into the squashed change. This suggests to me that we should actually treat the metadata as a multimap, somewhat like the query parameters in a URL. @ept @paulsonnentag what do you think?

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What would a squash look like? Could a user still override the metadata to set it to something custom for the squashed change, or would it be purely mechanical that the metadata of the squashed changes would always be the union of the metadata of the individual changes?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@alexjg Good point. I'd think that a squash commit would need to bring in custom logic for compacting the metadata: for example, we might not want to keep every single timestamp, but only the minimum and maximum among the timestamps in the squashed range. For authors we might want to keep the set of distinct users who have contributed at least one change, and for signatures we might want to keep the most recent signature per branch per signing key. This suggests to me that we can keep the data model for metadata on a single change simple (a single value per entry in the map), and figure out how to represent changes on squash commits once we get to that point.

I just realised something: if we do author attribution using metadata on changes, it would probably not be possible to do attribution on a squash commit or shallow clone, because the per-change information is not available. On the other hand, if we do attribution by mapping actor IDs to user IDs, attribution should still be possible, because the squash should preserve opIds, and the actor-to-user mapping can be included in the squash. That would be an argument for using the actorIds for attribution.


/** A function that defines default metadata for each change on the handle
 *
 * @remarks
 * This function can be defined globally on the {@link Repo} and is passed down to all {@link DocHandle}s.
 * The metadata can be overridden by explicitly passing metadata in {@link DocHandle.change} or {@link DocHandle.changeAt}.
 * */
export type ChangeMetadataFunction = (
documentId: DocumentId
) => ChangeMetadata | undefined

export interface DocHandleMessagePayload {
destinationId: PeerId
documentId: DocumentId
Expand Down
36 changes: 27 additions & 9 deletions packages/automerge-repo/src/Repo.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,12 @@ import {
interpretAsDocumentId,
parseAutomergeUrl,
} from "./AutomergeUrl.js"
import { DocHandle, DocHandleEncodedChangePayload } from "./DocHandle.js"
import {
ChangeMetadataFunction,
DocHandle,
DocHandleEncodedChangePayload,
DocHandleChangeOptions,
} from "./DocHandle.js"
import { RemoteHeadsSubscriptions } from "./RemoteHeadsSubscriptions.js"
import { headsAreSame } from "./helpers/headsAreSame.js"
import { throttle } from "./helpers/throttle.js"
Expand Down Expand Up @@ -55,18 +60,22 @@ export class Repo extends EventEmitter<RepoEvents> {
#remoteHeadsSubscriptions = new RemoteHeadsSubscriptions()
#remoteHeadsGossipingEnabled = false

#changeMetadata: ChangeMetadataFunction

constructor({
storage,
network,
peerId,
sharePolicy,
isEphemeral = storage === undefined,
enableRemoteHeadsGossiping = false,
changeMetadata = () => undefined,
}: RepoConfig) {
super()
this.#remoteHeadsGossipingEnabled = enableRemoteHeadsGossiping
this.#log = debug(`automerge-repo:repo`)
this.sharePolicy = sharePolicy ?? this.sharePolicy
this.#changeMetadata = changeMetadata

// DOC COLLECTION

Expand Down Expand Up @@ -323,15 +332,18 @@ export class Repo extends EventEmitter<RepoEvents> {
/** The documentId of the handle to look up or create */
documentId: DocumentId,

/** If we know we're creating a new document, specify this so we can have access to it immediately */
isNew: boolean
/** When creating a handle for a new doc, set init to true or pass in an init options object */
init?: DocHandleChangeOptions<T>
) {
// If we have the handle cached, return it
if (this.#handleCache[documentId]) return this.#handleCache[documentId]

// If not, create a new handle, cache it, and return it
if (!documentId) throw new Error(`Invalid documentId ${documentId}`)
const handle = new DocHandle<T>(documentId, { isNew })
const handle = new DocHandle<T>(documentId, {
changeMetadata: this.#changeMetadata,
init,
})
this.#handleCache[documentId] = handle
return handle
}
Expand All @@ -355,7 +367,7 @@ export class Repo extends EventEmitter<RepoEvents> {
* an empty object `{}`. Its documentId is generated by the system. we emit a `document` event
* to advertise interest in the document.
*/
create<T>(): DocHandle<T> {
create<T>(options: DocHandleChangeOptions<T> = {}): DocHandle<T> {
// TODO:
// either
// - pass an initial value and do something like this to ensure that you get a valid initial value
Expand All @@ -376,7 +388,7 @@ export class Repo extends EventEmitter<RepoEvents> {

// Generate a new UUID and store it in the buffer
const { documentId } = parseAutomergeUrl(generateAutomergeUrl())
const handle = this.#getHandle<T>(documentId, true) as DocHandle<T>
const handle = this.#getHandle<T>(documentId, options) as DocHandle<T>
this.emit("document", { handle, isNew: true })
return handle
}
Expand Down Expand Up @@ -442,7 +454,7 @@ export class Repo extends EventEmitter<RepoEvents> {
return this.#handleCache[documentId]
}

const handle = this.#getHandle<T>(documentId, false) as DocHandle<T>
const handle = this.#getHandle<T>(documentId) as DocHandle<T>
this.emit("document", { handle, isNew: false })
return handle
}
Expand All @@ -453,7 +465,7 @@ export class Repo extends EventEmitter<RepoEvents> {
) {
const documentId = interpretAsDocumentId(id)

const handle = this.#getHandle(documentId, false)
const handle = this.#getHandle(documentId)
handle.delete()

delete this.#handleCache[documentId]
Expand All @@ -470,7 +482,7 @@ export class Repo extends EventEmitter<RepoEvents> {
async export(id: AnyDocumentId): Promise<Uint8Array | undefined> {
const documentId = interpretAsDocumentId(id)

const handle = this.#getHandle(documentId, false)
const handle = this.#getHandle(documentId)
const doc = await handle.doc()
if (!doc) return undefined
return Automerge.save(doc)
Expand Down Expand Up @@ -532,6 +544,12 @@ export interface RepoConfig {
*/
sharePolicy?: SharePolicy

/**
* Define default metadata that is added to each change made through the repo.
* This function is called inside of {@link DocHandle} on each change.
*/
changeMetadata?: ChangeMetadataFunction

/**
* Whether to enable the experimental remote heads gossiping feature
*/
Expand Down
Loading
Loading